[ 514.880877] env[61772]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61772) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 514.881359] env[61772]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61772) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 514.881484] env[61772]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61772) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 514.881799] env[61772]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 515.006808] env[61772]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61772) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 515.015495] env[61772]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=61772) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 515.133188] env[61772]: INFO nova.virt.driver [None req-4fa2df3a-dfd1-4ec3-8040-3eb31440d0e4 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 515.209673] env[61772]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61772) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 515.209873] env[61772]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.002s {{(pid=61772) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 515.209958] env[61772]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61772) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 518.280397] env[61868]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61868) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 518.280870] env[61868]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61868) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 518.280870] env[61868]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61868) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 518.281211] env[61868]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 518.333364] env[61868]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61868) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 518.342073] env[61868]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=61868) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 518.413188] env[61868]: INFO nova.virt.driver [None req-6152439e-0013-4452-b4c8-bc99a4b298a1 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
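The "Loaded VIF plugin" lines above are emitted while os_vif initializes every plugin registered under its stevedore entry-point namespace. A minimal sketch of that discovery step, assuming only the real namespace name 'os_vif'; the loader function and the prints are illustrative, not os_vif's actual code:

```python
# Hedged sketch: discover VIF plugins the way os_vif does, via stevedore
# entry points. Only the namespace name 'os_vif' is taken from the library;
# everything else is illustrative.
from stevedore import extension

def load_vif_plugins():
    mgr = extension.ExtensionManager(
        namespace='os_vif',    # setuptools entry-point group the plugins register under
        invoke_on_load=False,  # keep the plugin classes uninstantiated for now
    )
    plugins = {ext.name: ext.plugin for ext in mgr}
    for name, cls in sorted(plugins.items()):
        print("Loaded VIF plugin class %r with name %r" % (cls, name))
    print("Loaded VIF plugins: %s" % ", ".join(sorted(plugins)))
    return plugins
```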
"oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 518.452507] env[61868]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 518.452507] env[61868]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61868) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 521.656875] env[61868]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-fe53aee0-6b16-4f36-b966-8532e25dca07 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.674424] env[61868]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61868) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 521.674583] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-c4818060-e000-4ec4-8606-88e36e3734d7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.717454] env[61868]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 60c6b. [ 521.717655] env[61868]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.265s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 521.718302] env[61868]: INFO nova.virt.vmwareapi.driver [None req-6152439e-0013-4452-b4c8-bc99a4b298a1 None None] VMware vCenter version: 7.0.3 [ 521.721835] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9118c9-9aaf-40b5-a066-5a48327bfc21 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.740082] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcc7a66-85dc-4f0b-bee6-b5c30f8b22f0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.746662] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e520e0b3-29cf-4e7a-951a-0c8a2120653e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.753915] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fff032-aff2-4c2b-8291-b30ff5110853 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.768121] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeba53b2-9924-4cac-8821-7972700e5bad {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.775328] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5058a4-79cb-4f73-b4ef-5ba15d371cf9 {{(pid=61868) 
[ 521.775328] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5058a4-79cb-4f73-b4ef-5ba15d371cf9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.807657] env[61868]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-b5334f50-00a9-4045-bab5-9de19277253c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 521.814224] env[61868]: DEBUG nova.virt.vmwareapi.driver [None req-6152439e-0013-4452-b4c8-bc99a4b298a1 None None] Extension org.openstack.compute already exists. {{(pid=61868) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 521.817198] env[61868]: INFO nova.compute.provider_config [None req-6152439e-0013-4452-b4c8-bc99a4b298a1 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 521.829615] env[61868]: DEBUG nova.context [None req-6152439e-0013-4452-b4c8-bc99a4b298a1 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),a2a4a4fa-2773-4737-9cf8-354e700f962b(cell1) {{(pid=61868) load_cells /opt/stack/nova/nova/context.py:464}}
[ 521.831799] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 521.832037] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 521.832909] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 521.833389] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Acquiring lock "a2a4a4fa-2773-4737-9cf8-354e700f962b" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 521.833588] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Lock "a2a4a4fa-2773-4737-9cf8-354e700f962b" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 521.834579] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Lock "a2a4a4fa-2773-4737-9cf8-354e700f962b" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 521.866889] env[61868]: INFO dbcounter [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Registered counter for database nova_cell0
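The Acquiring/acquired/released triplets above (and around the VMware session earlier) come from oslo.concurrency's lockutils, which logs every wait and hold time. Code typically produces them through the synchronized decorator or the lock context manager, roughly as follows; the lock names match the log, the guarded bodies are illustrative:

```python
# Hedged sketch of the lockutils usage behind the lock log lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('oslo_vmware_api_lock')
def _create_session():
    ...  # only one caller at a time may (re)build the vCenter session

def get_or_set_cached_cell(cell_uuid):
    # per-cell lock, matching the "00000000-..." / "a2a4a4fa-..." lines above
    with lockutils.lock(cell_uuid):
        ...  # cache the cell's DB/MQ connections under the lock
```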
[ 521.875349] env[61868]: INFO dbcounter [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Registered counter for database nova_cell1
[ 521.880589] env[61868]: DEBUG oslo_db.sqlalchemy.engines [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_ENGINE_SUBSTITUTION {{(pid=61868) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 521.884436] env[61868]: DEBUG dbcounter [-] [61868] Writer thread running {{(pid=61868) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 521.884787] env[61868]: DEBUG oslo_db.sqlalchemy.engines [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_ENGINE_SUBSTITUTION {{(pid=61868) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 521.887241] env[61868]: ERROR nova.db.main.api [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 521.887241] env[61868]:     result = function(*args, **kwargs)
[ 521.887241] env[61868]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 521.887241] env[61868]:     return func(*args, **kwargs)
[ 521.887241] env[61868]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 521.887241] env[61868]:     result = fn(*args, **kwargs)
[ 521.887241] env[61868]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 521.887241] env[61868]:     return f(*args, **kwargs)
[ 521.887241] env[61868]:   File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 521.887241] env[61868]:     return db.service_get_minimum_version(context, binaries)
[ 521.887241] env[61868]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 521.887241] env[61868]:     _check_db_access()
[ 521.887241] env[61868]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 521.887241] env[61868]:     stacktrace = ''.join(traceback.format_stack())
[ 521.887241] env[61868]:
[ 521.889059] env[61868]: DEBUG dbcounter [-] [61868] Writer thread running {{(pid=61868) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 521.890049] env[61868]: ERROR nova.db.main.api [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 521.890049] env[61868]:     result = function(*args, **kwargs)
[ 521.890049] env[61868]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 521.890049] env[61868]:     return func(*args, **kwargs)
[ 521.890049] env[61868]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 521.890049] env[61868]:     result = fn(*args, **kwargs)
[ 521.890049] env[61868]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 521.890049] env[61868]:     return f(*args, **kwargs)
[ 521.890049] env[61868]:   File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 521.890049] env[61868]:     return db.service_get_minimum_version(context, binaries)
[ 521.890049] env[61868]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 521.890049] env[61868]:     _check_db_access()
[ 521.890049] env[61868]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 521.890049] env[61868]:     stacktrace = ''.join(traceback.format_stack())
[ 521.890049] env[61868]:
[ 521.890736] env[61868]: WARNING nova.objects.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Failed to get minimum service version for cell a2a4a4fa-2773-4737-9cf8-354e700f962b
[ 521.890906] env[61868]: WARNING nova.objects.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
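The two ERROR stack dumps and the "Failed to get minimum service version" warnings are expected at this point in startup: nova-compute deliberately blocks direct access to the main database, and the guard in nova/db/main/api.py logs the offending stack before refusing the call, once per cell. Nova's exact implementation isn't reproduced here; a simplified sketch of the pattern, with illustrative names:

```python
# Hedged sketch of the "No DB access allowed in nova-compute" guard: a
# process-wide flag is set when the compute service starts, and every DB API
# wrapper checks it, logging the call stack and raising instead of querying.
# Names are illustrative, not Nova's exact code.
import logging
import traceback

LOG = logging.getLogger(__name__)
DISABLE_DB_ACCESS = False  # nova-compute flips this to True at startup


class DBNotAllowed(Exception):
    pass


def check_db_access(func):
    def wrapper(*args, **kwargs):
        if DISABLE_DB_ACCESS:
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
            raise DBNotAllowed()
        return func(*args, **kwargs)
    return wrapper


@check_db_access
def service_get_minimum_version(context, binaries):
    # Would query the cell database; callers catch DBNotAllowed and log the
    # "Failed to get minimum service version" warning seen above instead.
    ...
```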
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.893414] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] backdoor_port = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.893543] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] backdoor_socket = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.893709] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] block_device_allocate_retries = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.893871] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] block_device_allocate_retries_interval = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.894042] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cert = self.pem {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.894213] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.894388] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute_monitors = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.894559] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] config_dir = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.894731] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] config_drive_format = iso9660 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.894863] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.895027] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] config_source = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.895193] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] console_host = devstack {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.895356] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] control_exchange = nova {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.895517] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cpu_allocation_ratio = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 
521.895677] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] daemon = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.895846] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] debug = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.896011] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] default_access_ip_network_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.896217] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] default_availability_zone = nova {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.896373] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] default_ephemeral_format = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.896533] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] default_green_pool_size = 1000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.896766] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.896928] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] default_schedule_zone = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.897160] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] disk_allocation_ratio = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.897261] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] enable_new_services = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.897420] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] enabled_apis = ['osapi_compute'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.897585] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] enabled_ssl_apis = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.897746] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] flat_injected = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.897906] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] force_config_drive = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.898064] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] force_raw_images = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.898231] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] graceful_shutdown_timeout = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.898394] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] heal_instance_info_cache_interval = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.898618] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] host = cpu-1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.898796] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.898961] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.899119] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.899329] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.899491] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] instance_build_timeout = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.899652] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] instance_delete_interval = 300 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.899820] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] instance_format = [instance: %(uuid)s] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.899985] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] instance_name_template = instance-%08x {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.900162] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] 
instance_usage_audit = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.900335] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] instance_usage_audit_period = month {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.900532] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.900711] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.900881] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] internal_service_availability_zone = internal {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.901039] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] key = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.901200] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] live_migration_retry_count = 30 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.901367] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_config_append = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.901559] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.901729] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_dir = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.901887] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.902019] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_options = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.902181] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_rotate_interval = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.902353] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_rotate_interval_type = days {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.902524] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] log_rotation_type = none {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.902681] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.902881] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.902997] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.903167] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.903299] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.903466] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] long_rpc_timeout = 1800 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.903632] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] max_concurrent_builds = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.903794] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] max_concurrent_live_migrations = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.903954] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] max_concurrent_snapshots = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.904131] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] max_local_block_devices = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.904295] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] max_logfile_count = 30 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.904459] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] max_logfile_size_mb = 200 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.904621] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] maximum_instance_delete_attempts = 5 
{{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.904791] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] metadata_listen = 0.0.0.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.904960] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] metadata_listen_port = 8775 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.905129] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] metadata_workers = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.905290] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] migrate_max_retries = -1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.905456] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] mkisofs_cmd = genisoimage {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.905666] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.905797] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] my_ip = 10.180.1.21 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.905963] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] network_allocate_retries = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.906145] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.906315] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.906481] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] osapi_compute_listen_port = 8774 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.906651] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] osapi_compute_unique_server_name_scope = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.906821] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] osapi_compute_workers = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.906985] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] password_length = 12 {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.907148] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] periodic_enable = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.907311] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] periodic_fuzzy_delay = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.907482] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] pointer_model = usbtablet {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.907653] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] preallocate_images = none {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.907813] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] publish_errors = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.907943] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] pybasedir = /opt/stack/nova {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.908133] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ram_allocation_ratio = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.908298] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rate_limit_burst = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.908468] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rate_limit_except_level = CRITICAL {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.908629] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rate_limit_interval = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.908788] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] reboot_timeout = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.908946] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] reclaim_instance_interval = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.909103] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] record = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.909273] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] reimage_timeout_per_gb = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.909441] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 
None None] report_interval = 120 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.909606] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rescue_timeout = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.909770] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] reserved_host_cpus = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.909931] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] reserved_host_disk_mb = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.910093] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] reserved_host_memory_mb = 512 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.910255] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] reserved_huge_pages = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.910414] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] resize_confirm_window = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.910613] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] resize_fs_using_block_device = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.910789] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] resume_guests_state_on_host_boot = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.910963] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.911129] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rpc_response_timeout = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.911290] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] run_external_periodic_tasks = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.911471] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] running_deleted_instance_action = reap {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.911651] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.911814] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] running_deleted_instance_timeout = 0 {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.912049] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler_instance_sync_interval = 120 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.912156] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_down_time = 720 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.912327] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] servicegroup_driver = db {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.912489] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] shelved_offload_time = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.912650] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] shelved_poll_interval = 3600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.912843] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] shutdown_timeout = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.913015] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] source_is_ipv6 = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.913177] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ssl_only = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.913436] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.913606] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] sync_power_state_interval = 600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.913770] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] sync_power_state_pool_size = 1000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.913937] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] syslog_log_facility = LOG_USER {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.914094] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] tempdir = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.914254] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] timeout_nbd = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.914421] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] transport_url = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.914584] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] update_resources_interval = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.914744] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] use_cow_images = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.914902] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] use_eventlog = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.915061] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] use_journal = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.915218] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] use_json = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.915374] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] use_rootwrap_daemon = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.915534] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] use_stderr = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.915690] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] use_syslog = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.915844] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vcpu_pin_set = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.916071] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plugging_is_fatal = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.916204] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plugging_timeout = 300 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.916369] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] virt_mkfs = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.916532] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] volume_usage_poll_interval = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.916695] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] watch_log_file = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 521.916862] env[61868]: DEBUG 
oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] web = /usr/share/spice-html5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 521.917054] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_concurrency.disable_process_locking = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.917371] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.917554] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.917723] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.917896] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.918065] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.918233] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.918439] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.auth_strategy = keystone {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.918620] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.compute_link_prefix = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.918799] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.918972] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.dhcp_domain = novalocal {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.919139] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.enable_instance_password = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.919302] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.glance_link_prefix = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.919468] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.919639] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.919800] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.instance_list_per_project_cells = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.919961] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.list_records_by_skipping_down_cells = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.920137] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.local_metadata_per_cell = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.920306] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.max_limit = 1000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.920524] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.metadata_cache_expiration = 15 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.920693] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.neutron_default_tenant_id = default {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.920863] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.use_neutron_default_nets = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.921034] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.921197] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.921364] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.921563] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.921742] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.vendordata_dynamic_targets = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.921910] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.vendordata_jsonfile_path = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.922165] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.922277] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.backend = dogpile.cache.memcached {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.922445] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.backend_argument = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.922617] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.config_prefix = cache.oslo {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.922791] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.dead_timeout = 60.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.922960] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.debug_cache_backend = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.923154] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.enable_retry_client = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.923274] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.enable_socket_keepalive = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.923446] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.enabled = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.923648] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.expiration_time = 600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.923859] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.hashclient_retry_attempts = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.924102] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.924282] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_dead_retry = 300 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.924456] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_password = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.924623] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.924783] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.924946] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_pool_maxsize = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.925108] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.925270] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_sasl_enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.925451] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.925623] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.925796] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.memcache_username = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.925965] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.proxies = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.926133] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.retry_attempts = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.926301] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.retry_delay = 0.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.926468] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.socket_keepalive_count = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.926633] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.socket_keepalive_idle = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.926796] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.socket_keepalive_interval = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.926953] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.tls_allowed_ciphers = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.927111] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.tls_cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.927272] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.tls_certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.927425] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.tls_enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.927582] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cache.tls_keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.927764] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.927933] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.auth_type = password {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.928118] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.928311] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.928549] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.928750] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.928916] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.cross_az_attach = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.929079] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.debug = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.929242] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.endpoint_template = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.929405] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.http_retries = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.929607] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.929777] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.929955] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.os_region_name = RegionOne {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.930124] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.930285] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cinder.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.930482] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.930655] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.cpu_dedicated_set = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.930820] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.cpu_shared_set = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.930991] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.image_type_exclude_list = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.931154] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.931317] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.931505] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.931691] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.931868] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.932054] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.resource_provider_association_refresh = 300 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.932314] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.shutdown_retry_interval = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.932402] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.932582] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] conductor.workers = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.932760] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] console.allowed_origins = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.932921] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] console.ssl_ciphers = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.933095] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] console.ssl_minimum_version = default {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.933295] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] consoleauth.token_ttl = 600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.933422] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.933580] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.933745] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.933903] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.connect_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.934063] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.connect_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.934221] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.endpoint_override = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.934381] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.934539] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.934701] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.max_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.934862] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.min_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.935020] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.region_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.935176] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.retriable_status_codes = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.935333] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.service_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.935500] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.service_type = accelerator {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.935669] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.935824] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.status_code_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.935981] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.status_code_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.936159] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.936340] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.936504] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] cyborg.version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.936697] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.backend = sqlalchemy {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.936880] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.connection = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.937052] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.connection_debug = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.937225] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.connection_parameters = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.937391] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.connection_recycle_time = 3600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.937559] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.connection_trace = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.937727] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.db_inc_retry_interval = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.937892] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.db_max_retries = 20 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.938056] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.db_max_retry_interval = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.938220] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.db_retry_interval = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.938414] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.max_overflow = 50 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.938551] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.max_pool_size = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.938722] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.max_retries = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.938893] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.939052] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.mysql_wsrep_sync_wait = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.939216] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.pool_timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.939384] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.retry_interval = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.939544] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.slave_connection = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.939711] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.sqlite_synchronous = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.939875] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] database.use_db_reconnect = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.940073] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.backend = sqlalchemy {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.940256] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.connection = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.940429] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.connection_debug = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.940635] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.connection_parameters = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.940810] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.connection_recycle_time = 3600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.940979] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.connection_trace = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.941142] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.db_inc_retry_interval = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.941305] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.db_max_retries = 20 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.941490] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.db_max_retry_interval = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.941685] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.db_retry_interval = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.941860] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.max_overflow = 50 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.942024] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.max_pool_size = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.942194] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.max_retries = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.942455] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.942531] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.942674] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.pool_timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.942845] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.retry_interval = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.943001] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.slave_connection = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.943222] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] api_database.sqlite_synchronous = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.943439] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] devices.enabled_mdev_types = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.943518] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.943666] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ephemeral_storage_encryption.enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.943833] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.944007] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.api_servers = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.944210] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.944373] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.944537] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.944694] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.connect_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.944850] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.connect_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.945009] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.debug = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.945171] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.default_trusted_certificate_ids = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.945330] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.enable_certificate_validation = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.945492] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.enable_rbd_download = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.945652] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.endpoint_override = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.945815] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.945975] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.946134] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.max_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.946288] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.min_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.946448] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.num_retries = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.946618] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.rbd_ceph_conf = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.946780] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.rbd_connect_timeout = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.946944] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.rbd_pool = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.947109] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.rbd_user = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.947265] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.region_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.947419] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.retriable_status_codes = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.947575] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.service_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.947743] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.service_type = image {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.947903] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.948073] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.status_code_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.948234] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.status_code_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.948393] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.948602] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.948774] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.verify_glance_signatures = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.948937] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] glance.version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.949108] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] guestfs.debug = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.949277] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] mks.enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.949652] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.949849] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] image_cache.manager_interval = 2400 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.950023] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] image_cache.precache_concurrency = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.950205] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] image_cache.remove_unused_base_images = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.950384] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.950581] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.950767] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] image_cache.subdirectory_name = _base {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.950948] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.api_max_retries = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.951114] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.api_retry_interval = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.951284] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.951453] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.auth_type = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.951614] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.951772] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.951969] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.952153] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.conductor_group = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.952314] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.connect_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.952477] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.connect_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.952646] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.endpoint_override = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.952797] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.952954] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.953111] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.max_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.953365] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.min_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.953448] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.peer_list = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.953611] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.region_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.953731] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.retriable_status_codes = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.953898] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.serial_console_state_timeout = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.954058] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.service_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.954229] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.service_type = baremetal {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.954392] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.954549] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.status_code_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.954708] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.status_code_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.954865] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.955044] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.955205] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ironic.version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.955388] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.955563] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] key_manager.fixed_key = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.955750] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.955912] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.barbican_api_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.956088] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.barbican_endpoint = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.956266] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.barbican_endpoint_type = public {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.956429] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.barbican_region_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.956592] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.956752] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.956978] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.957072] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.957230] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.957392] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.number_of_retries = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.957556] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.retry_delay = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.957720] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.send_service_user_token = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.957883] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.958041] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.958202] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.verify_ssl = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.958359] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican.verify_ssl_path = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.958549] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.958730] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.auth_type = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.958889] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.959047] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.959209] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.959371] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.959531] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.959693] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.959855] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] barbican_service_user.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.960029] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.approle_role_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.960187] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.approle_secret_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.960346] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.960698] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.960890] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.961059] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.961222] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.961399] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.kv_mountpoint = secret {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.961576] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.kv_path = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.961765] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.kv_version = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.961928] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.namespace = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.962089] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.root_token_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.962254] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.962413] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.ssl_ca_crt_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.962573] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.962746] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.use_ssl = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.962899] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.963070] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.963234] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.auth_type = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.963394] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.963575] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.963742] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.963899] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.connect_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.964070] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.connect_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.964262] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.endpoint_override = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.964437] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.964598] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.964760] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.max_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.964924] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.min_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.965087] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.region_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.965309] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.retriable_status_codes = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.965489] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.service_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.965666] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.service_type = identity {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.965831] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.966013] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.status_code_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.966145] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.status_code_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.966306] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.966492] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.966680] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] keystone.version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.966898] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.connection_uri = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.967063] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.cpu_mode = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.967228] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.967400] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.cpu_models = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.967572] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.cpu_power_governor_high = performance {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.967744] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.967905] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.cpu_power_management = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 521.968089] env[61868]: DEBUG oslo_service.service [None
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.968261] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.device_detach_attempts = 8 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.968423] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.device_detach_timeout = 20 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.968624] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.disk_cachemodes = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.968791] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.disk_prefix = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.968965] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.enabled_perf_events = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.969118] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.file_backed_memory = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.969283] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.gid_maps = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.969444] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.hw_disk_discard = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.969606] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.hw_machine_type = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.969810] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.images_rbd_ceph_conf = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.969983] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.970154] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.970328] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.images_rbd_glance_store_name = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.970534] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 
None None] libvirt.images_rbd_pool = rbd {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.970719] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.images_type = default {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.970884] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.images_volume_group = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.971054] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.inject_key = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.971211] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.inject_partition = -2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.971371] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.inject_password = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.971572] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.iscsi_iface = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.971765] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.iser_use_multipath = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.971936] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.972118] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.972286] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_downtime = 500 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.972449] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.972613] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.972882] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_inbound_addr = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.972973] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] 
libvirt.live_migration_permit_auto_converge = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.973095] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.973254] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_scheme = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.973427] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_timeout_action = abort {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.973632] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_tunnelled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.973834] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_uri = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.973918] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.live_migration_with_native_tls = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.974061] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.max_queues = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.974222] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.974457] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.974624] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.nfs_mount_options = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.974931] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.975106] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.975279] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.975446] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.975617] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.975785] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.num_pcie_ports = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.975952] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.976140] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.pmem_namespaces = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.976303] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.quobyte_client_cfg = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.976604] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.976781] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.976948] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.977112] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.977274] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rbd_secret_uuid = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.977433] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rbd_user = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.977601] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.977777] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.977939] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rescue_image_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.978100] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rescue_kernel_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.978256] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rescue_ramdisk_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.978426] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.978589] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.rx_queue_size = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.978758] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.smbfs_mount_options = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.979036] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.979209] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.snapshot_compression = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.979371] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.snapshot_image_format = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.979590] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.979758] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.sparse_logical_volumes = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.979923] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.swtpm_enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.980108] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.swtpm_group = tss {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.980281] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.swtpm_user = tss {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.980465] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.sysinfo_serial = unique {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.980635] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.tb_cache_size = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.980802] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.tx_queue_size = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.980972] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.uid_maps = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.981135] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.use_virtio_for_bridges = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.981308] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.virt_type = kvm {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.981483] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.volume_clear = zero {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.981676] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.volume_clear_size = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.981875] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.volume_use_multipath = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.982012] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.vzstorage_cache_path = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.982185] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.982356] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.982524] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.982695] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.983022] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 
None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.983154] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.vzstorage_mount_user = stack {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.983323] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.983500] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.983772] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.auth_type = password {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.983857] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.984024] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.984172] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.984334] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.connect_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.984498] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.connect_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.984675] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.default_floating_pool = public {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.984839] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.endpoint_override = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.985004] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.extension_sync_interval = 600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.985172] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.http_retries = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.985335] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.insecure = False {{(pid=61868) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.985497] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.985660] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.max_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.985832] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.985995] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.min_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.986166] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.ovs_bridge = br-int {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.986354] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.physnets = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.986505] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.region_name = RegionOne {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.986671] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.retriable_status_codes = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.986841] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.service_metadata_proxy = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.987004] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.service_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.987175] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.service_type = network {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.987340] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.987499] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.status_code_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.987659] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.status_code_retry_delay = None {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.987816] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.987994] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.988172] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] neutron.version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.988348] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] notifications.bdms_in_notifications = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.988530] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] notifications.default_level = INFO {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.988708] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] notifications.notification_format = unversioned {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.988875] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] notifications.notify_on_state_change = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.989055] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.989233] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] pci.alias = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.989402] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] pci.device_spec = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.989566] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] pci.report_in_placement = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.989741] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.989916] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.auth_type = password {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.990078] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.990239] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.990398] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.990578] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.990750] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.connect_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.990910] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.connect_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.991072] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.default_domain_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.991232] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.default_domain_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.991391] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.domain_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.991559] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.domain_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.991723] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.endpoint_override = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.991886] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.992055] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.992218] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.max_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.992378] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.min_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.992549] env[61868]: 
DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.password = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.992713] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.project_domain_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.992881] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.project_domain_name = Default {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.993049] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.project_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.993240] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.project_name = service {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.993385] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.region_name = RegionOne {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.993546] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.retriable_status_codes = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.993706] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.service_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.993911] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.service_type = placement {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.994098] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.994184] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.status_code_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.994343] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.status_code_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.994504] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.system_scope = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.994664] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.994822] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.trust_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.994983] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.user_domain_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.995152] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.user_domain_name = Default {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.995313] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.user_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.995487] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.username = placement {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.995673] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.995837] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] placement.version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.996089] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.cores = 20 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.996197] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.count_usage_from_placement = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.996374] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.996552] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.injected_file_content_bytes = 10240 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.996722] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.injected_file_path_length = 255 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.996890] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.injected_files = 5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.997054] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.instances = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.997218] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.key_pairs = 100 
{{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.997383] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.metadata_items = 128 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.997547] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.ram = 51200 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.997711] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.recheck_quota = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.997877] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.server_group_members = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.998043] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] quota.server_groups = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.998211] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rdp.enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.998539] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] rdp.html5_proxy_base_url = http://127.0.0.1:6083/ {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.998724] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.998891] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.999058] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.image_metadata_prefilter = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.999222] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.999386] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.max_attempts = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.999550] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.max_placement_results = 1000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.999718] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.placement_aggregate_required_for_tenants = False 
{{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 521.999881] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.000184] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.000395] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] scheduler.workers = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.000597] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.000782] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.000966] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.001143] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.001310] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.001488] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.001663] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.001855] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.002029] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.host_subset_size = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.002199] env[61868]: DEBUG 
oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.002365] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.002535] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.002706] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.isolated_hosts = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.002872] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.isolated_images = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.003038] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.003289] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.003375] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.003522] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.pci_in_placement = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.003689] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.003854] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.004122] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.004314] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.004377] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] 
filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.004512] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.004677] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.track_instance_changes = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.004859] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.005034] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] metrics.required = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.005203] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] metrics.weight_multiplier = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.005368] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.005534] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] metrics.weight_setting = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.005851] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.006027] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] serial_console.enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.006204] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] serial_console.port_range = 10000:20000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.006377] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.006550] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.006723] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] serial_console.serialproxy_port = 6083 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.006895] env[61868]: DEBUG 
oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.007071] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.auth_type = password {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.007236] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.007397] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.007664] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.007733] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009213] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009213] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.send_service_user_token = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009213] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009213] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] service_user.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009213] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.agent_enabled = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009213] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009576] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009576] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009576] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.html5proxy_port = 6082 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009576] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.image_compression = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009807] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.jpeg_compression = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.009864] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.playback_compression = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.010009] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.server_listen = 127.0.0.1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.010190] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.010352] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.streaming_mode = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.010546] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] spice.zlib_compression = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.010734] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] upgrade_levels.baseapi = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.010911] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] upgrade_levels.compute = auto {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.011076] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] upgrade_levels.conductor = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.011236] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] upgrade_levels.scheduler = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.011406] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.011603] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.011774] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] 
vendordata_dynamic_auth.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.011935] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.012124] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.012279] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.012441] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.012604] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.012807] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vendordata_dynamic_auth.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.013033] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.api_retry_count = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.013203] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.ca_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.013451] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.013536] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.cluster_name = testcl1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.013705] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.connection_pool_size = 10 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.013868] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.console_delay_seconds = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.014039] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.datastore_regex = ^datastore.* {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.014356] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] 
vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.014452] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.host_password = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.014602] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.host_port = 443 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.014775] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.host_username = administrator@vsphere.local {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.014947] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.insecure = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.015110] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.integration_bridge = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.015278] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.maximum_objects = 100 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.015438] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.pbm_default_policy = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.015602] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.pbm_enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.015762] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.pbm_wsdl_location = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.015935] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.016113] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.serial_port_proxy_uri = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.016275] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.serial_port_service_uri = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.016446] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.task_poll_interval = 0.5 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.016624] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.use_linked_clone = False {{(pid=61868) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.016795] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.vnc_keymap = en-us {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.016963] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.vnc_port = 5900 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.017130] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vmware.vnc_port_total = 10000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.017319] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.auth_schemes = ['none'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.017489] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.enabled = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.017806] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.017995] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.018169] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.novncproxy_port = 6080 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.018364] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.server_listen = 127.0.0.1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.018582] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.018760] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.vencrypt_ca_certs = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.018927] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.vencrypt_client_cert = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.019090] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vnc.vencrypt_client_key = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.019267] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.019433] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.019600] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.019766] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.019930] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.disable_rootwrap = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.020110] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.enable_numa_live_migration = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.020280] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.020449] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.020655] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.020825] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.libvirt_disable_apic = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.020989] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.021154] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.021317] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.021502] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.021683] 
env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.021848] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.022013] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.022175] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.022335] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.022503] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.022697] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.022941] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.client_socket_timeout = 900 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.023134] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.default_pool_size = 1000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.023306] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.keep_alive = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.023571] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.max_header_line = 16384 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.023646] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.023796] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.ssl_ca_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.023959] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.ssl_cert_file = None {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.024140] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.ssl_key_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.024373] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.tcp_keepidle = 600 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.024551] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.024656] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] zvm.ca_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.024822] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] zvm.cloud_connector_url = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.025134] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.025311] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] zvm.reachable_timeout = 300 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.025493] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.enforce_new_defaults = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.025666] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.enforce_scope = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.025847] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.policy_default_rule = default {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.026027] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.026200] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.policy_file = policy.yaml {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.026372] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.026535] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None 
None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.026698] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.026857] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.027017] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.027184] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.027357] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.027535] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.connection_string = messaging:// {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.027705] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.enabled = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.027874] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.es_doc_type = notification {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.028047] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.es_scroll_size = 10000 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.028229] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.es_scroll_time = 2m {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.028403] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.filter_error_trace = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.028598] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.hmac_keys = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.028778] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.sentinel_service_name = mymaster {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.028952] env[61868]: DEBUG oslo_service.service [None 
req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.socket_timeout = 0.1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.029115] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.trace_requests = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.029276] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler.trace_sqlalchemy = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.029455] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler_jaeger.process_tags = {} {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.029619] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler_jaeger.service_name_prefix = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.029783] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] profiler_otlp.service_name_prefix = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.029950] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] remote_debug.host = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.030112] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] remote_debug.port = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.030292] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.030478] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.030652] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.030835] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.031069] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.031254] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.031422] env[61868]: DEBUG 
oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.031622] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.031795] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.031957] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.032145] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.032316] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.032494] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.032663] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.032828] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.033004] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.033169] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.033371] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.033494] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.033710] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 
None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.033818] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.033987] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.034150] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.034312] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.034511] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.034691] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.ssl = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.034805] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.034978] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.035142] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.035312] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.035482] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.035682] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.035846] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_notifications.retry = -1 {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.036069] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.036216] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.036391] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.auth_section = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.036554] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.auth_type = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.036715] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.cafile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.036875] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.certfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.037037] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.collect_timing = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.037197] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.connect_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.037357] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.connect_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.037514] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.endpoint_id = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.037673] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.endpoint_override = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.037831] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.insecure = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.037988] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.keyfile = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.038147] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.max_version = None {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.038304] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.min_version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.038472] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.region_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.038657] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.retriable_status_codes = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.038819] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.service_name = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.038980] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.service_type = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.039143] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.split_loggers = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.039302] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.status_code_retries = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.039464] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.status_code_retry_delay = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.039624] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.timeout = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.039782] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.valid_interfaces = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.039941] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_limit.version = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.040140] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_reports.file_event_handler = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.040313] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.040512] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] oslo_reports.log_dir = None {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.040687] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.040851] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.041012] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.041180] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.041347] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.041532] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.041741] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.041908] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_ovs_privileged.group = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.042072] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.042238] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.042400] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.042560] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] vif_plug_ovs_privileged.user = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.042731] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.042943] env[61868]: DEBUG 
oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.043139] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.043315] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.043490] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.043667] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.043846] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.043994] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.044190] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.044363] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_ovs.isolate_vif = False {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.044531] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.044810] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.044890] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.045017] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.045179] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_vif_ovs.per_port_bridge = False {{(pid=61868) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.045344] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] os_brick.lock_path = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.045512] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] privsep_osbrick.capabilities = [21] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.045675] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] privsep_osbrick.group = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.045832] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] privsep_osbrick.helper_command = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.045995] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.046161] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.046318] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] privsep_osbrick.user = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.046491] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.046652] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] nova_sys_admin.group = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.046813] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] nova_sys_admin.helper_command = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.046977] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.047139] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.047295] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] nova_sys_admin.user = None {{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 522.047428] env[61868]: DEBUG oslo_service.service [None req-4114694a-df48-45f8-b7ff-32706629cf90 None None] ******************************************************************************** 
{{(pid=61868) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 522.047858] env[61868]: INFO nova.service [-] Starting compute node (version 0.0.1)
[ 522.057599] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Getting list of instances from cluster (obj){
[ 522.057599] env[61868]: value = "domain-c8"
[ 522.057599] env[61868]: _type = "ClusterComputeResource"
[ 522.057599] env[61868]: } {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 522.058909] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ff6ac2-8108-4b48-b992-38062793411a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.069899] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Got total of 0 instances {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 522.070537] env[61868]: WARNING nova.virt.vmwareapi.driver [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list.
[ 522.071036] env[61868]: INFO nova.virt.node [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Generated node identity 6539a0d3-09f9-481f-a837-7ea10081c3cc
[ 522.071268] env[61868]: INFO nova.virt.node [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Wrote node identity 6539a0d3-09f9-481f-a837-7ea10081c3cc to /opt/stack/data/n-cpu-1/compute_id
[ 522.085190] env[61868]: WARNING nova.compute.manager [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Compute nodes ['6539a0d3-09f9-481f-a837-7ea10081c3cc'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
[ 522.121487] env[61868]: INFO nova.compute.manager [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
[ 522.148521] env[61868]: WARNING nova.compute.manager [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found.
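[annotation] The "Acquiring lock ... / Lock ... acquired ... :: waited / Lock ... "released" ... :: held" triplets that recur below (e.g. around "compute_resources") are emitted by oslo.concurrency's lock helpers; the log itself points at the `inner` wrapper in lockutils.py:404/409/423. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the function name here is illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with the in-process lock named "compute_resources" held;
        # the decorator's wrapper logs the "waited" and "held" durations
        # seen in this log.
        pass

    update_available_resource()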
[ 522.148787] env[61868]: DEBUG oslo_concurrency.lockutils [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 522.149007] env[61868]: DEBUG oslo_concurrency.lockutils [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 522.149174] env[61868]: DEBUG oslo_concurrency.lockutils [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 522.149389] env[61868]: DEBUG nova.compute.resource_tracker [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 522.150537] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68d4937-ed56-47c2-a002-39916132bdd7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.159262] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05c9973-27f5-4739-9735-896637e4d8c7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.174307] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f996c6-f7c0-4069-a13a-8830c1f55117 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.181594] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5475d0e8-4aed-46f3-9816-bb6e687f3da7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.213655] env[61868]: DEBUG nova.compute.resource_tracker [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181952MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 522.213943] env[61868]: DEBUG oslo_concurrency.lockutils [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 522.214046] env[61868]: DEBUG oslo_concurrency.lockutils [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 522.227161] env[61868]: WARNING 
nova.compute.resource_tracker [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] No compute node record for cpu-1:6539a0d3-09f9-481f-a837-7ea10081c3cc: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 6539a0d3-09f9-481f-a837-7ea10081c3cc could not be found. [ 522.243008] env[61868]: INFO nova.compute.resource_tracker [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 6539a0d3-09f9-481f-a837-7ea10081c3cc [ 522.300545] env[61868]: DEBUG nova.compute.resource_tracker [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 522.300830] env[61868]: DEBUG nova.compute.resource_tracker [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 522.406247] env[61868]: INFO nova.scheduler.client.report [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] [req-b1b0ed7f-27de-444e-a8e0-95d698d1ab36] Created resource provider record via placement API for resource provider with UUID 6539a0d3-09f9-481f-a837-7ea10081c3cc and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 522.423703] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a490ed8c-ee3d-48fe-babd-48d3037eb4e6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.431526] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e110c1c-5ab6-4559-afa6-da5695add417 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.461519] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f53cd36-68c7-48d1-b22d-da6201311500 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.469545] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6586465c-80fb-48ac-9a13-a16ef99d82d0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.483425] env[61868]: DEBUG nova.compute.provider_tree [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 522.522477] env[61868]: DEBUG nova.scheduler.client.report [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Updated inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 522.522721] env[61868]: DEBUG nova.compute.provider_tree [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Updating resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc generation from 0 to 1 during operation: update_inventory {{(pid=61868) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 522.522889] env[61868]: DEBUG nova.compute.provider_tree [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 522.572070] env[61868]: DEBUG nova.compute.provider_tree [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Updating resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc generation from 1 to 2 during operation: update_traits {{(pid=61868) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 522.592612] env[61868]: DEBUG nova.compute.resource_tracker [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 522.592808] env[61868]: DEBUG oslo_concurrency.lockutils [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.379s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 522.592974] env[61868]: DEBUG nova.service [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Creating RPC server for service compute {{(pid=61868) start /opt/stack/nova/nova/service.py:182}} [ 522.607409] env[61868]: DEBUG nova.service [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] Join ServiceGroup membership for this service compute {{(pid=61868) start /opt/stack/nova/nova/service.py:199}} [ 522.607615] env[61868]: DEBUG nova.servicegroup.drivers.db [None req-192fb88b-cc07-47f6-b125-97514e48c997 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61868) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 531.887863] env[61868]: DEBUG dbcounter [-] [61868] Writing DB stats nova_cell1:SELECT=1 {{(pid=61868) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 531.889797] env[61868]: DEBUG dbcounter [-] [61868] Writing DB stats nova_cell0:SELECT=1 {{(pid=61868) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 549.610384] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 549.622599] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Getting list of instances from cluster (obj){
[ 549.622599] env[61868]: value = "domain-c8"
[ 549.622599] env[61868]: _type = "ClusterComputeResource"
[ 549.622599] env[61868]: } {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 549.623713] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6390e47e-dd98-4987-83ee-71ae7a8a75d5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.633710] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Got total of 0 instances {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 549.634042] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 549.634249] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Getting list of instances from cluster (obj){
[ 549.634249] env[61868]: value = "domain-c8"
[ 549.634249] env[61868]: _type = "ClusterComputeResource"
[ 549.634249] env[61868]: } {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 549.635339] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00eff66-e148-4390-85b5-7ea4bc749d55 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.643401] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Got total of 0 instances {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 578.361239] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.361666] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.361802] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 578.361884] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}}
[ 578.374820] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}}
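[annotation] The "Running periodic task ComputeManager._..." records here come from oslo.service's periodic-task machinery (run_periodic_tasks at oslo_service/periodic_task.py:210 in this log): methods decorated with @periodic_task.periodic_task on a PeriodicTasks subclass are collected and dispatched on each tick. A minimal sketch under that assumption; the manager class and spacing value are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=600)
        def _sync_power_states(self, context):
            # Reconcile hypervisor power state with the database, as the
            # ComputeManager task of the same name does above.
            pass

    manager = Manager(cfg.CONF)
    # Each call logs "Running periodic task ..." for every due task.
    manager.run_periodic_tasks(context=None)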
[ 578.375054] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.375294] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.376260] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.376484] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.376714] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.376871] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 578.377076] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 578.377230] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 578.388364] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 578.388786] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 578.388888] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 578.389279] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 578.390658] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2af93c4-73cd-41a6-b027-dde74397fde3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.405344] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1562ec43-c99c-4674-b6e9-3e37621ddd75 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.426693] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843f8bfd-ca07-4dae-bacf-2965b586b710 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.436154] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12642709-67de-407f-84bb-a092d2e9e576 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.478724] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181950MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 578.479070] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 578.479428] 
env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 578.544919] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 578.545347] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 578.565737] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4da704-f96d-4eb6-9ca1-24641ac4dd4f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.576475] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75216f00-9838-4638-a280-95ea0f37f6cc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.612466] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fce86a-8f07-4686-8dd0-92ef1f173f6b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.621718] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c640b05-7a2e-4510-bb62-c00089b4313d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.644234] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.656721] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 578.686321] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 578.686525] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.207s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 583.809671] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "75fcbf2e-6097-42b1-9857-be7aecb9b7c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 583.809671] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "75fcbf2e-6097-42b1-9857-be7aecb9b7c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 583.831901] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 583.933934] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 583.934123] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 583.935817] env[61868]: INFO nova.compute.claims [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 584.077741] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ea64a0-1238-4853-9cae-950671d0b6a0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.086387] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983181a3-6882-400f-b9c4-49c60f0cf736 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.142677] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8f9d13-97bc-4fd3-afca-cbe1ba516557 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.151094] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc476076-b1ba-4ec9-bcb3-25f27bfcdc15 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.170158] env[61868]: DEBUG nova.compute.provider_tree [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.182084] env[61868]: DEBUG nova.scheduler.client.report [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 584.205696] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.271s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 584.206248] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 584.253319] env[61868]: DEBUG nova.compute.utils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 584.255141] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Not allocating networking since 'none' was specified. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 584.274780] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 584.353829] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 584.939658] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 584.939953] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 584.940134] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 584.940337] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 584.940527] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 584.940698] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 584.940908] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 584.941063] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 584.941464] env[61868]: DEBUG nova.virt.hardware [None 
req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 584.941631] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 584.941931] env[61868]: DEBUG nova.virt.hardware [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 584.942868] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4811d6-94be-40b6-b26f-0882f61f869a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.952209] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e31fbd8-909b-4578-9e30-2fe9c12e5982 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.969027] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ba047a-ea59-4dea-b11b-ae0243a8e21b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.990175] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.003899] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 585.004216] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fafb2ba-8486-4026-aaf7-e531130dac55 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.019501] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Created folder: OpenStack in parent group-v4. [ 585.019690] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Creating folder: Project (684bb9aa77364976ae8ac3a4e3bb4a22). Parent ref: group-v18181. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 585.020175] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5005d496-4029-4ea9-b719-aa57528f354f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 585.031936] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Created folder: Project (684bb9aa77364976ae8ac3a4e3bb4a22) in parent group-v18181.
[ 585.032362] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Creating folder: Instances. Parent ref: group-v18182. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 585.032457] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d3fd5e3-5197-4293-8143-2e7e236bd91e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 585.044321] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Created folder: Instances in parent group-v18182.
[ 585.044583] env[61868]: DEBUG oslo.service.loopingcall [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 585.044780] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 585.045011] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-803864aa-f428-455e-b1b4-56b2f2e1aad9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 585.065838] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 585.065838] env[61868]: value = "task-40893"
[ 585.065838] env[61868]: _type = "Task"
[ 585.065838] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 585.081093] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40893, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 585.575959] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40893, 'name': CreateVM_Task, 'duration_secs': 0.373244} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 585.577347] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 585.577880] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 585.578140] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 585.589100] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c764e0a0-3714-4155-83ca-b8058bce118f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 585.623721] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}}
[ 585.624118] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1e93178-8856-4878-bd60-22438b4eaaab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 585.643553] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Waiting for the task: (returnval){
[ 585.643553] env[61868]: value = "task-40894"
[ 585.643553] env[61868]: _type = "Task"
[ 585.643553] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 585.652634] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': task-40894, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
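[annotation] The "(returnval){ ... _type = "Task" } to complete" dumps and the "progress is N%" / "completed successfully" lines above are oslo.vmware's task-wait loop (wait_for_task at api.py:397 polling via _poll_task at api.py:434/444): vCenter methods whose names end in _Task, such as CreateVM_Task and ReconfigVM_Task, return a Task managed-object reference that the session polls until it succeeds or fails. A minimal sketch, assuming oslo.vmware is installed; the host, credentials, vm_ref and spec are placeholders, and a real call needs a reachable vCenter:

    from oslo_vmware import api

    def reconfigure_vm(session, vm_ref, spec):
        # *_Task calls return a Task managed object; wait_for_task blocks,
        # logging progress like the "ReconfigVM_Task progress is 5%" lines.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=spec)
        return session.wait_for_task(task)

    # Placeholder connection values (constructing the session logs in,
    # as the "_create_session" records earlier in this log show):
    # session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
    #                                api_retry_count=10,
    #                                task_poll_interval=0.5)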
[ 586.071217] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock "7b89d270-4000-484f-87dd-507335e5c7dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 586.071217] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "7b89d270-4000-484f-87dd-507335e5c7dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 586.108879] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 586.154398] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': task-40894, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 586.206590] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 586.207026] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 586.208837] env[61868]: INFO nova.compute.claims [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 586.422353] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9525b0ab-2ea3-4c66-a802-ce378f8aae46 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.432130] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9721ed3-4ba8-4140-9d05-909c1487d95c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.470418] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-57bebf3f-d35d-42ac-b188-435aa54c9b85 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.479388] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b1910d-4f88-48b2-a6e6-ff425f117450 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.498410] env[61868]: DEBUG nova.compute.provider_tree [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.510398] env[61868]: DEBUG nova.scheduler.client.report [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 586.530681] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.323s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 586.532103] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 586.583989] env[61868]: DEBUG nova.compute.utils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 586.585560] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Not allocating networking since 'none' was specified. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 586.602086] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 586.655745] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': task-40894, 'name': ReconfigVM_Task, 'duration_secs': 0.869627} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.656040] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 586.656249] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 1.078s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 586.657000] env[61868]: DEBUG oslo_vmware.service [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a143cc-7ef6-4f77-af3d-8e55645493cd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.664066] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 586.664249] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 586.664984] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 586.665245] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fdcfe9c-ee88-442e-8ac6-fc09968af71f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.673782] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Waiting for the task: (returnval){ [ 586.673782] env[61868]: value = 
"session[523e2a88-3d56-7540-5a68-121a3b0ef814]52dc5b09-ace9-e635-ce45-78621199fcd0" [ 586.673782] env[61868]: _type = "Task" [ 586.673782] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.693716] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52dc5b09-ace9-e635-ce45-78621199fcd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.721207] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 586.749128] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 586.749399] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 586.749561] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.749748] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 586.749926] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.750080] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 586.750281] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 586.750434] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 586.750626] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 586.750799] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 586.750984] env[61868]: DEBUG nova.virt.hardware [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 586.751840] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ed0134-f0e2-4bc0-9bed-8494e97a5420 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.761335] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f790ba20-54db-4495-a2e5-13962ffc7e6c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.775773] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 586.782981] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Creating folder: Project (127c638b692d4a9a9fa25eec263f7b08). Parent ref: group-v18181. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 586.783314] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df6a548e-5db7-4ffb-8245-a7180d877534 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.798594] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Created folder: Project (127c638b692d4a9a9fa25eec263f7b08) in parent group-v18181. [ 586.798594] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Creating folder: Instances. Parent ref: group-v18185. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 586.798594] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3359f015-f3f0-4ea5-8cf7-05724e983789 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.805919] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Created folder: Instances in parent group-v18185. [ 586.806169] env[61868]: DEBUG oslo.service.loopingcall [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.806363] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 586.806566] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3050e07b-e42d-4b40-a86b-1d4acb319663 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.825660] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 586.825660] env[61868]: value = "task-40897" [ 586.825660] env[61868]: _type = "Task" [ 586.825660] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.842880] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40897, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.186940] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 587.186940] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 587.186940] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 587.186940] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 587.187289] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 587.187289] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e47046b5-ecfd-4f07-9e37-8a3420617460 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.203618] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 587.203618] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 587.203618] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906c25da-411d-4fce-a187-b2d815cc714a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.205995] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9196c883-e337-4104-b219-7b1cba1c4f16 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.215832] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Waiting for the task: (returnval){ [ 587.215832] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a2bc01-52f4-4920-6a13-4403f0759552" [ 587.215832] env[61868]: _type = "Task" [ 587.215832] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.221019] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a2bc01-52f4-4920-6a13-4403f0759552, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.336413] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40897, 'name': CreateVM_Task} progress is 99%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.604452] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "163a2904-1b18-4a83-9acf-6c9fe3ad511c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 587.604452] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "163a2904-1b18-4a83-9acf-6c9fe3ad511c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 587.619903] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 587.685967] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 587.686225] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 587.687769] env[61868]: INFO nova.compute.claims [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.722739] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 587.722986] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Creating directory with path [datastore2] vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 587.723217] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66d75a74-89c3-4a9b-8d60-6068c0ef49ea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.754285] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Created directory with path [datastore2] vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 587.754786] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Fetch image to [datastore2] vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 587.754786] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] 
vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 587.756305] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb9e857-9232-49d5-bc04-28fb5e1fddd3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.767346] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dd1777-c685-4920-8a41-4c80b597aff1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.784923] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490784dc-1bcc-43f2-bae3-e9be46a590e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.827528] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e23f979-62a8-482b-b4e2-b6f37bf3d7ca {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.850290] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1c254799-c477-4fa2-b448-95888c0c11c0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.856521] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40897, 'name': CreateVM_Task} progress is 99%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.861467] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2644f70-89ac-4f9d-9753-090c3cee0024 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.871008] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79de49b8-7d86-4962-9730-ee91cc8da9b8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.914122] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56332b8-fb61-409f-bfee-cfc01fd3f979 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.925787] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11e2f33-00d8-48f5-b114-c118cd529ae2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.948163] env[61868]: DEBUG nova.compute.provider_tree [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.950158] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Downloading image file 
data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 587.956923] env[61868]: DEBUG nova.scheduler.client.report [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 587.985110] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.293s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 587.985110] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 588.026846] env[61868]: DEBUG nova.compute.utils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.029277] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 588.029578] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 588.044319] env[61868]: DEBUG oslo_vmware.rw_handles [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 588.049639] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 588.128696] env[61868]: DEBUG oslo_vmware.rw_handles [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 588.128948] env[61868]: DEBUG oslo_vmware.rw_handles [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 588.180658] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 588.212884] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 588.213206] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 588.213294] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.213469] env[61868]: DEBUG nova.virt.hardware [None 
req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 588.213610] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.213751] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 588.213958] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 588.214103] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 588.214263] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 588.214416] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 588.214578] env[61868]: DEBUG nova.virt.hardware [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.215587] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e44a12-a86f-4d8d-b3b0-01809344b622 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.225402] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731c0c4f-9d1b-4ce6-a91e-c2e39edc25df {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.245938] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] No network configured 
{{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1188}} [ 588.246503] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance network_info: |[]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 588.246864] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 588.252995] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Creating folder: Project (1076dd41304d46698a638e9f3c7f9bba). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 588.253670] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81968168-24a0-4705-8a40-28ca45771175 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.269631] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Created folder: Project (1076dd41304d46698a638e9f3c7f9bba) in parent group-v18181. [ 588.270113] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Creating folder: Instances. Parent ref: group-v18188. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 588.270433] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1eaa029-59ae-4bfb-a21e-8d10c7615320 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.281850] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Created folder: Instances in parent group-v18188. [ 588.282122] env[61868]: DEBUG oslo.service.loopingcall [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.282319] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 588.282522] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ae278c6-2539-4726-9147-38bd9c484d02 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.309602] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 588.309602] env[61868]: value = "task-40900" [ 588.309602] env[61868]: _type = "Task" [ 588.309602] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.324034] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40900, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.341823] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40897, 'name': CreateVM_Task, 'duration_secs': 1.253678} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.342019] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 588.342502] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 588.342731] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 588.345882] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80a2431-620d-422f-94f6-0a24babd3271 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.380587] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Reconfiguring VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 588.381237] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ed4918a-011b-481f-a7d3-b10a24dd738a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.401184] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] 
Waiting for the task: (returnval){ [ 588.401184] env[61868]: value = "task-40901" [ 588.401184] env[61868]: _type = "Task" [ 588.401184] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.420779] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Task: {'id': task-40901, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.653482] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "b9c8dd4c-8a19-4ffb-8e57-b273c000f121" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 588.653711] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "b9c8dd4c-8a19-4ffb-8e57-b273c000f121" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 588.668042] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 588.756118] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 588.756118] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 588.756118] env[61868]: INFO nova.compute.claims [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.819961] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40900, 'name': CreateVM_Task, 'duration_secs': 0.266782} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.820885] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 588.823832] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 588.920926] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Task: {'id': task-40901, 'name': ReconfigVM_Task, 'duration_secs': 0.138413} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.921205] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Reconfigured VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 588.921424] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.579s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 588.921729] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 588.921884] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 588.922480] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 588.922770] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.100s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 588.923031] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d071de68-45c8-4b06-8f7e-4aa7dc3efa7c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.928344] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b674e3d-81ee-4218-9ba9-3506f0fe1fee {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.954922] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cab229-1d5b-49ef-96e4-e0274667bc30 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.962961] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Waiting for the task: (returnval){ [ 588.962961] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]529ace27-9ff4-ebb9-a8c7-e8f0ef904a9e" [ 588.962961] env[61868]: _type = "Task" [ 588.962961] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.979250] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Reconfiguring VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 588.984613] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ded58587-5d7f-4007-ade9-0fb9f03afcc6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.995282] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]529ace27-9ff4-ebb9-a8c7-e8f0ef904a9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.996547] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107e7452-3d89-42d1-a64a-20f29eedf013 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.036319] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c292c15c-47a7-4076-8064-99ba1b7ab6e5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.037616] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 589.037616] env[61868]: value = "task-40902" [ 589.037616] env[61868]: _type = "Task" [ 589.037616] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.045822] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68637d8-9366-459e-bf5b-898e99d7729a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.053601] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-40902, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.071001] env[61868]: DEBUG nova.compute.provider_tree [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.083735] env[61868]: DEBUG nova.scheduler.client.report [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 589.114719] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 589.115052] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 589.116592] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.364s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 589.117177] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: 
b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 589.144583] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 589.210541] env[61868]: DEBUG nova.compute.utils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.212048] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 589.212314] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 589.260147] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 589.268528] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 589.268528] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 589.269843] env[61868]: INFO nova.compute.claims [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.313714] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] No network configured {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1188}} [ 589.314056] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance network_info: |[]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 589.381622] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 589.403654] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=<?>,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=<?>,size=50659328,status='active',tags=<?>,updated_at=2024-02-13T12:42:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 589.403878] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 589.404051] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.404237] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 589.404378] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.404518] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 589.404843] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 589.405009] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 589.405178] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 589.405336] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 589.405504] env[61868]: DEBUG nova.virt.hardware [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 589.406365] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a29bf4d-38fa-4616-9d33-2761534058f1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.416438] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32b4f69-3f2d-42d4-9028-efe2c0d156e2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.442013] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.447718] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Creating folder: Project (080571381ae547a9abff5152ac9de26e). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.448340] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23a27a90-9db4-485e-b0fb-f965cfe62ebc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.468236] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Created folder: Project (080571381ae547a9abff5152ac9de26e) in parent group-v18181. [ 589.468236] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Creating folder: Instances. Parent ref: group-v18191. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.468236] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46da33c8-f12c-4abd-9b34-ce8a0397d434 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.475582] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 589.475825] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.476051] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 589.476266] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Created folder: Instances in parent group-v18191. [ 589.476480] env[61868]: DEBUG oslo.service.loopingcall [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.476754] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 589.476902] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b93c109e-fcf2-4253-80ef-7e8e839fc52a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.496721] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.496721] env[61868]: value = "task-40905" [ 589.496721] env[61868]: _type = "Task" [ 589.496721] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.508108] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40905, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.515965] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ac7410-009e-400d-8952-c00de5e4b47f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.524109] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de41a40a-eb82-4a6e-a6de-0b39e5bd3fcc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.570130] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ada9898-74a8-4d53-9d42-d9158564badc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.577592] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "c1efc2dd-6474-4fba-a00e-f104f0d446de" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 589.577814] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "c1efc2dd-6474-4fba-a00e-f104f0d446de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 589.588980] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5bbb33-9853-46f2-aa08-d03b50e3f951 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.594328] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-40902, 'name': ReconfigVM_Task, 'duration_secs': 0.115175} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.594462] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Reconfigured VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 589.594704] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.672s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 589.595108] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 589.595191] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 589.595481] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 589.596583] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a45ebf05-151a-4c70-a6d0-d56bacf5253c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.610493] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 589.614245] env[61868]: DEBUG nova.compute.provider_tree [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.620075] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 589.620075] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5231b6ed-bce2-1343-fd01-197962b46351" [ 589.620075] env[61868]: _type = "Task" [ 589.620075] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.629987] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5231b6ed-bce2-1343-fd01-197962b46351, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.632175] env[61868]: DEBUG nova.scheduler.client.report [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 589.661983] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.394s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 589.662769] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 589.690367] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 589.690722] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 589.692694] env[61868]: INFO nova.compute.claims [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.699510] env[61868]: DEBUG nova.compute.utils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.701589] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 589.701758] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 589.712440] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 589.814571] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] No network configured {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1188}} [ 589.814744] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance network_info: |[]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 589.816881] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 589.849226] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=<?>,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=<?>,size=50659328,status='active',tags=<?>,updated_at=2024-02-13T12:42:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 589.849679] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 589.850543] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.851201] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 589.852149] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.852378] env[61868]: DEBUG nova.virt.hardware [None
req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 589.852614] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 589.852802] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 589.853053] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 589.853178] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 589.853362] env[61868]: DEBUG nova.virt.hardware [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 589.854622] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3e8412-fa6f-411f-b6c0-f2bfc66efca1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.868929] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7c9dc4-e1a6-4ba2-b09b-8d7ee38e06b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.890546] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.897367] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Creating folder: Project (e71b3b61d48d480e984ae9be0bc38413). Parent ref: group-v18181. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.900117] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5a7b906-c17a-46ad-83f7-11159fe2dd8f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.914247] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Created folder: Project (e71b3b61d48d480e984ae9be0bc38413) in parent group-v18181. [ 589.914447] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Creating folder: Instances. Parent ref: group-v18194. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.914934] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3d47312-a96d-445c-b1bc-a926e2bd4558 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.927974] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865af377-45e6-42fe-af8d-9104d0de53b2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.933482] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Created folder: Instances in parent group-v18194. [ 589.933733] env[61868]: DEBUG oslo.service.loopingcall [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.934614] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 589.934842] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecce1655-0a15-4dfa-971a-056f090d67f9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.954808] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42aea56-13d7-4dec-b01e-57a1a666ec65 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.959592] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.959592] env[61868]: value = "task-40908" [ 589.959592] env[61868]: _type = "Task" [ 589.959592] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.995235] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09cfa44-30b9-40d7-adf4-2abcd690c0f1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.996527] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40908, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.009816] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e411a02-6960-4b44-bf32-d182c36dcb42 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.019558] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40905, 'name': CreateVM_Task, 'duration_secs': 0.280986} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.019831] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 590.021152] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 590.021152] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 590.024732] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b668b9-d4d2-4e5b-bc5a-9d7c482a1287 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.049701] env[61868]: DEBUG nova.compute.provider_tree [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.060448] env[61868]: DEBUG nova.scheduler.client.report [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 590.078471] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Reconfiguring VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 590.079417] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20d23a15-f675-467c-9f79-b8aefaf89466 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.096091] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 590.096597] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 590.105279] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Waiting for the task: (returnval){ [ 590.105279] env[61868]: value = "task-40909" [ 590.105279] env[61868]: _type = "Task" [ 590.105279] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.130190] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Task: {'id': task-40909, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.140426] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 590.140426] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.140698] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 590.201894] env[61868]: DEBUG nova.compute.utils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.203275] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 590.203445] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.234161] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 590.280303] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] No network configured {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1188}} [ 590.280303] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance network_info: |[]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 590.340818] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 590.364098] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=<?>,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=<?>,size=50659328,status='active',tags=<?>,updated_at=2024-02-13T12:42:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.364377] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.364552] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.364768] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.364921] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.365070] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123
tempest-MigrationsAdminTest-1732276123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.365278] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.365435] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 590.365604] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.365767] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.365939] env[61868]: DEBUG nova.virt.hardware [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.366817] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7204574c-2e0a-4136-9c0d-3f6d6b702734 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.376150] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc3a092-d45d-412e-bd18-82e9cb06b2a9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.393665] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.399286] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Creating folder: Project (dc52e005af4a40018755e2cbbe5775c6). Parent ref: group-v18181. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.399631] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3979878-7bad-4aba-9e45-02ee7c286eae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.411757] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Created folder: Project (dc52e005af4a40018755e2cbbe5775c6) in parent group-v18181. [ 590.411985] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Creating folder: Instances. Parent ref: group-v18197. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.412255] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c62bca2c-0c47-4362-bd9d-a56b7efcdd31 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.427175] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Created folder: Instances in parent group-v18197. [ 590.427327] env[61868]: DEBUG oslo.service.loopingcall [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.427535] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 590.427744] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70263aaf-3a4e-435d-8a5b-d1bc83c3b9c6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.448118] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.448118] env[61868]: value = "task-40912" [ 590.448118] env[61868]: _type = "Task" [ 590.448118] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.460417] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40912, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.474542] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40908, 'name': CreateVM_Task, 'duration_secs': 0.277218} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.474726] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 590.475062] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 590.616042] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Task: {'id': task-40909, 'name': ReconfigVM_Task, 'duration_secs': 0.115626} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.616391] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Reconfigured VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 590.616601] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.596s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 590.616876] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 590.617034] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 590.617408] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 590.617736] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "vmware.get_and_set_vnc_port" acquired by 
"nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.143s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 590.617979] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1cf6e78-20d6-4739-b5a6-7d314bb6cdae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.631876] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2bf5f6-148f-4339-980b-116abe54a31c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.649513] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Waiting for the task: (returnval){ [ 590.649513] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5291a349-f2ff-0db5-8629-4e916682ec6c" [ 590.649513] env[61868]: _type = "Task" [ 590.649513] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.659190] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5291a349-f2ff-0db5-8629-4e916682ec6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.670961] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Reconfiguring VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 590.671434] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91b56780-1e30-45f2-8443-db5b98ee595c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.689947] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for the task: (returnval){ [ 590.689947] env[61868]: value = "task-40913" [ 590.689947] env[61868]: _type = "Task" [ 590.689947] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.700340] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Task: {'id': task-40913, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.959219] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40912, 'name': CreateVM_Task, 'duration_secs': 0.295645} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.959459] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 590.959699] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 591.159967] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 591.160229] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.160436] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 591.199534] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Task: {'id': task-40913, 'name': ReconfigVM_Task, 'duration_secs': 0.144454} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.199832] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Reconfigured VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 591.200047] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.582s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 591.200297] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 591.200432] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 591.200752] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 591.201030] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.241s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 591.201248] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03417ef5-b452-4f73-92b8-8f42b1b5bd9e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.209150] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7ae330-ef86-4ebc-a47d-060d9f3edf63 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.226301] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for the task: (returnval){ [ 591.226301] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]528c4d61-98e6-fdec-cf8c-45986c631297" [ 591.226301] env[61868]: _type = 
"Task" [ 591.226301] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.235333] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]528c4d61-98e6-fdec-cf8c-45986c631297, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.248725] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Reconfiguring VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 591.249065] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f39e804e-d19c-4927-967c-97685977ef31 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.266401] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for the task: (returnval){ [ 591.266401] env[61868]: value = "task-40914" [ 591.266401] env[61868]: _type = "Task" [ 591.266401] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.278515] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Task: {'id': task-40914, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.724442] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 591.724442] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 591.741971] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 591.742786] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.743216] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 591.743637] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 591.778674] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Task: {'id': task-40914, 'name': ReconfigVM_Task, 'duration_secs': 0.115552} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.779169] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Reconfigured VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 591.779516] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.578s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 591.779898] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 591.780219] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 591.780679] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 591.781353] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39b2569a-5a4f-454f-86c8-393e5e3d000c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.786924] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for the task: (returnval){ [ 591.786924] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52bb8244-e70b-e7a7-f3e4-14e512647bf6" [ 591.786924] env[61868]: _type = "Task" [ 591.786924] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.798907] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52bb8244-e70b-e7a7-f3e4-14e512647bf6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.826056] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 591.830995] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.005s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 591.834588] env[61868]: INFO nova.compute.claims [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.020034] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32144360-9e7d-4343-819b-8954154461a3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.028597] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cc4ea3-2e91-41df-9cb2-254389c507ca {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.070732] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a491386e-e075-4c66-a2cf-340376da5e23 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.083011] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad0a734-29b7-43be-8021-5cb09e26a5ea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.104019] env[61868]: DEBUG nova.compute.provider_tree [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.114073] env[61868]: DEBUG nova.scheduler.client.report [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 592.145278] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 
tempest-ServersTestJSON-63797321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.313s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 592.145278] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 592.202685] env[61868]: DEBUG nova.compute.utils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 592.204913] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 592.205225] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 592.217518] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 592.301243] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 592.301494] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.301768] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 592.348388] env[61868]: DEBUG nova.policy [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a7beef6f6b2442f8547fe8dbf0c6068', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22e890ee8f954d0aa38216c48816a071', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 592.361116] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 592.387903] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.388178] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.388368] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.388511] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.388655] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.388794] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.389153] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.389327] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.389543] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Got 1 possible topologies 
{{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.389728] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.389911] env[61868]: DEBUG nova.virt.hardware [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.391021] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413fbce9-4df2-4146-af77-88eb888129b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.402237] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb79759d-ac0b-4061-acd2-5a406f8780c7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.426967] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Successfully created port: af761ca4-c9e4-4908-9053-119f8438c1a2 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.835189] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Successfully updated port: af761ca4-c9e4-4908-9053-119f8438c1a2 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 594.867441] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "refresh_cache-3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 594.867595] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquired lock "refresh_cache-3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 594.867743] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 595.010626] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 595.637487] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Updating instance_info_cache with network_info: [{"id": "af761ca4-c9e4-4908-9053-119f8438c1a2", "address": "fa:16:3e:07:69:7c", "network": {"id": "237c7a76-9a1b-406d-9000-6d329d4e2639", "bridge": "br-int", "label": "tempest-ServersTestJSON-455559827-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "22e890ee8f954d0aa38216c48816a071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf761ca4-c9", "ovs_interfaceid": "af761ca4-c9e4-4908-9053-119f8438c1a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.659338] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Releasing lock "refresh_cache-3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 595.659738] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Instance network_info: |[{"id": "af761ca4-c9e4-4908-9053-119f8438c1a2", "address": "fa:16:3e:07:69:7c", "network": {"id": "237c7a76-9a1b-406d-9000-6d329d4e2639", "bridge": "br-int", "label": "tempest-ServersTestJSON-455559827-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "22e890ee8f954d0aa38216c48816a071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf761ca4-c9", "ovs_interfaceid": "af761ca4-c9e4-4908-9053-119f8438c1a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 595.660144] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd 
tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:69:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af761ca4-c9e4-4908-9053-119f8438c1a2', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 595.668983] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Creating folder: Project (22e890ee8f954d0aa38216c48816a071). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 595.669785] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1c60606-b6b6-4105-b004-bda851001a04 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.681799] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Created folder: Project (22e890ee8f954d0aa38216c48816a071) in parent group-v18181. [ 595.681960] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Creating folder: Instances. Parent ref: group-v18200. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 595.682836] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5c580cd-0cab-4bae-a78b-8bb98bc7d19f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.697098] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Created folder: Instances in parent group-v18200. [ 595.697357] env[61868]: DEBUG oslo.service.loopingcall [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 595.697558] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 595.697765] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-213da1e9-b6cb-4d9b-9900-81fe6eb0e157 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.720147] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.720147] env[61868]: value = "task-40917" [ 595.720147] env[61868]: _type = "Task" [ 595.720147] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.731646] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40917, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.233240] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40917, 'name': CreateVM_Task, 'duration_secs': 0.322925} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.233526] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 596.250182] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 596.250445] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 596.254882] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0ea845-f521-4d1c-9afb-9572ba3f038f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.289850] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Reconfiguring VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 596.289850] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4e9c74e-b86e-4479-a75b-56700cb664ef {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.308939] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Waiting for the task: (returnval){ [ 596.308939] env[61868]: value = "task-40918" [ 596.308939] env[61868]: _type = "Task" [ 596.308939] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.322396] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Task: {'id': task-40918, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.821344] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Task: {'id': task-40918, 'name': ReconfigVM_Task, 'duration_secs': 0.122688} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.821976] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Reconfigured VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 596.822169] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.572s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 596.822548] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 596.822824] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 596.823799] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 596.823799] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c49a0e7-3e11-405a-940c-c6c5b6202c96 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.829243] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Waiting for the task: (returnval){ [ 596.829243] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]526a5d98-b273-84a9-8baf-6a5845d334d3" [ 596.829243] env[61868]: _type = "Task" [ 596.829243] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.838322] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]526a5d98-b273-84a9-8baf-6a5845d334d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.008687] env[61868]: DEBUG nova.compute.manager [req-2c52857a-f30f-4892-87c9-ec0c92decab9 req-3bf179ab-980b-4401-aa91-f16c69f814b0 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Received event network-vif-plugged-af761ca4-c9e4-4908-9053-119f8438c1a2 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 597.008687] env[61868]: DEBUG oslo_concurrency.lockutils [req-2c52857a-f30f-4892-87c9-ec0c92decab9 req-3bf179ab-980b-4401-aa91-f16c69f814b0 service nova] Acquiring lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 597.008687] env[61868]: DEBUG oslo_concurrency.lockutils [req-2c52857a-f30f-4892-87c9-ec0c92decab9 req-3bf179ab-980b-4401-aa91-f16c69f814b0 service nova] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 597.008882] env[61868]: DEBUG oslo_concurrency.lockutils [req-2c52857a-f30f-4892-87c9-ec0c92decab9 req-3bf179ab-980b-4401-aa91-f16c69f814b0 service nova] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 597.008926] env[61868]: DEBUG nova.compute.manager [req-2c52857a-f30f-4892-87c9-ec0c92decab9 req-3bf179ab-980b-4401-aa91-f16c69f814b0 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] No waiting events found dispatching network-vif-plugged-af761ca4-c9e4-4908-9053-119f8438c1a2 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 597.009095] env[61868]: WARNING nova.compute.manager [req-2c52857a-f30f-4892-87c9-ec0c92decab9 req-3bf179ab-980b-4401-aa91-f16c69f814b0 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Received unexpected event network-vif-plugged-af761ca4-c9e4-4908-9053-119f8438c1a2 for instance with vm_state building and task_state spawning. 
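The repeated triplets above — Acquiring lock "vmware.get_and_set_vnc_port" / Lock ... acquired / Lock ... "released" — and the strictly increasing VNC ports (5904, 5905, 5906) are the signature of oslo.concurrency serializing port selection across concurrent spawns on one host. A minimal sketch of that pattern, with hypothetical names (pick_vnc_port, _assigned_ports); Nova's real code in VMwareVMOps._get_and_set_vnc_config scans vCenter extraConfig for ports in use rather than keeping a local set:

    # Sketch only: lockutils.synchronized emits exactly the
    # 'Acquiring lock ... / acquired ... waited Ns / "released" ... held Ns'
    # DEBUG lines seen above (lockutils.py:404/409/423).
    from oslo_concurrency import lockutils

    _assigned_ports = set()      # hypothetical stand-in for ports already in use
    _VNC_PORT_START = 5900       # conventional VNC base port

    @lockutils.synchronized('vmware.get_and_set_vnc_port')
    def pick_vnc_port():
        """Pick the lowest free port; the lock makes the scan race-free."""
        port = _VNC_PORT_START
        while port in _assigned_ports:
            port += 1
        _assigned_ports.add(port)
        return port

    if __name__ == '__main__':
        print([pick_vnc_port() for _ in range(4)])   # 5900, 5901, 5902, 5903

Without the lock, two spawns could scan concurrently and both settle on the same port, which is why each ReconfigVM_Task above runs entirely inside the held interval.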
[ 597.341418] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 597.341846] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 597.341951] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 598.588168] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 598.588625] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 598.601744] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 598.677926] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 598.678219] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 598.679725] env[61868]: INFO nova.compute.claims [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.913257] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44eebe0f-6595-4e8e-a009-ee02496b52ce {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.921876] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bc009b-2dda-4218-952a-4c83b2d8e816 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.960178] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e29511-6e96-40fd-8a11-522453e50096 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.969020] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d532439-6b88-4c09-ae3a-5c2daa4e8978 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.983831] env[61868]: DEBUG nova.compute.provider_tree [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.994403] env[61868]: DEBUG nova.scheduler.client.report [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 599.012273] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.334s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 599.012852] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 599.054799] env[61868]: DEBUG nova.compute.utils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 599.056252] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 599.056469] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 599.071524] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 599.167709] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 599.211269] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 599.211466] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 599.211666] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.211866] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 599.212059] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.212217] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 599.212421] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 599.212574] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 599.212734] env[61868]: DEBUG nova.virt.hardware [None 
req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 599.212890] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 599.213053] env[61868]: DEBUG nova.virt.hardware [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 599.216195] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fb78cd-78f7-499a-a7c5-e72771ea8ac2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.236963] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb16025-6263-465b-af0a-6fec248bc56e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.310215] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] No network configured {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1188}} [ 599.310545] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance network_info: |[]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 599.310862] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.316473] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Creating folder: Project (7ce618e3d0f14d46a44ea34699229e48). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.316763] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bf04b74-7e93-4fc9-bbe9-48b59d409343 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.328460] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Created folder: Project (7ce618e3d0f14d46a44ea34699229e48) in parent group-v18181. 
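Every vCenter call in this window follows the same asynchronous shape: invoke (Folder.CreateFolder, CreateVM_Task, ReconfigVM_Task), then the 'Waiting for the task ... progress is N% ... completed successfully' records from oslo.vmware's wait_for_task, which re-reads the task's state on a timer. A simplified stand-alone equivalent — the callback arguments and poll interval are assumptions, not the oslo.vmware API, whose real method takes a task managed-object reference and polls via a looping call:

    import time

    def wait_for_task(get_state, get_progress, poll_interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state."""
        while True:
            state = get_state()          # 'queued' | 'running' | 'success' | 'error'
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            print('progress is %d%%.' % get_progress())
            time.sleep(poll_interval)

    if __name__ == '__main__':
        # Fake task that succeeds on the third poll, mirroring the
        # 0% -> completed transitions logged above.
        states = iter(['queued', 'running', 'success'])
        wait_for_task(lambda: next(states), lambda: 0, poll_interval=0.01)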
[ 599.328592] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Creating folder: Instances. Parent ref: group-v18203. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.328777] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f586253c-8bfc-44c2-9e6b-9beffe6433dd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.339121] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Created folder: Instances in parent group-v18203. [ 599.339121] env[61868]: DEBUG oslo.service.loopingcall [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.339121] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 599.339121] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-102159cb-168c-49c9-8cfc-faabaea6f20f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.357518] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.357518] env[61868]: value = "task-40921" [ 599.357518] env[61868]: _type = "Task" [ 599.357518] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.369814] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40921, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.867618] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40921, 'name': CreateVM_Task, 'duration_secs': 0.379174} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.867890] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 599.868151] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 599.868433] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 599.874071] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdcbddf-c5a2-4b63-99f1-956639e9f41d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.928954] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Reconfiguring VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 599.929351] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f86d8e9d-2657-4329-90d0-2676e29a6f7e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.947130] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for the task: (returnval){ [ 599.947130] env[61868]: value = "task-40922" [ 599.947130] env[61868]: _type = "Task" [ 599.947130] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.955937] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Task: {'id': task-40922, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.314295] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "4cfa680a-0ea3-4c40-b89c-b6067397427a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 600.314571] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 600.330868] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 600.405238] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 600.405498] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 600.407031] env[61868]: INFO nova.compute.claims [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.459568] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Task: {'id': task-40922, 'name': ReconfigVM_Task, 'duration_secs': 0.122866} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.460453] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Reconfigured VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 600.460698] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.592s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 600.460940] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 600.461080] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 600.461392] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 600.463964] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a093d6af-8bb7-4c41-8694-ca38252301d2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.470236] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for the task: (returnval){ [ 600.470236] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]520c2b90-6c75-9eff-db36-04c5a0a4faa5" [ 600.470236] env[61868]: _type = "Task" [ 600.470236] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.483040] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]520c2b90-6c75-9eff-db36-04c5a0a4faa5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.621412] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929e3002-c40f-47a8-bba5-f8ff520802b1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.629609] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd0175a-491e-4ed8-a87d-5f9cdb653014 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.666057] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce22b73-92be-47a9-8cb9-666253f78351 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.674542] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf95e50d-7ebc-43a3-817d-636002241924 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.690311] env[61868]: DEBUG nova.compute.provider_tree [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.701985] env[61868]: DEBUG nova.scheduler.client.report [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 600.721763] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 600.722272] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 600.769990] env[61868]: DEBUG nova.compute.utils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.771356] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 600.771521] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 600.783115] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 600.875753] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 600.900619] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.900901] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.901058] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.901236] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.901377] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.901518] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.901826] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.901951] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.902037] env[61868]: DEBUG nova.virt.hardware [None 
req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.902192] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.902357] env[61868]: DEBUG nova.virt.hardware [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.903241] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb6a2a1-0db6-4b6e-bc87-7b38d46836fb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.911918] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdb1a0d-0978-4c73-88c9-7961d9a817f4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.947162] env[61868]: DEBUG nova.policy [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fafc3b33f104fa9933823e65f8c0be9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fe44517f4444d5e8752f9adcae734ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 600.981537] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 600.981819] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.982037] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server 
[None req-8dfb64db-d8f7-406a-a329-3beadb0e391a tempest-HypervisorAdminTestJSON-2043361277 tempest-HypervisorAdminTestJSON-2043361277-project-admin] Exception during message handling: NotImplementedError: Multiple hosts may be managed by the VMWare vCenter driver; therefore we do not return uptime for just one host.
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server raise self.value
[ 601.514054] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 601.514525] env[61868]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 601.514525] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6779, in get_host_uptime
[ 601.514525] env[61868]: ERROR oslo_messaging.rpc.server return self.driver.get_host_uptime()
[ 601.514525] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 707, in get_host_uptime
[ 601.514525] env[61868]: ERROR oslo_messaging.rpc.server raise NotImplementedError(msg)
[ 601.514525] env[61868]: ERROR oslo_messaging.rpc.server NotImplementedError: Multiple hosts may be managed by the VMWare vCenter driver; therefore we do not return uptime for just one host.
[ 601.514525] env[61868]: ERROR oslo_messaging.rpc.server
[ 601.586607] env[61868]: DEBUG nova.compute.manager [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Received event network-changed-af761ca4-c9e4-4908-9053-119f8438c1a2 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 601.586607] env[61868]: DEBUG nova.compute.manager [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Refreshing instance network info cache due to event network-changed-af761ca4-c9e4-4908-9053-119f8438c1a2.
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 601.586758] env[61868]: DEBUG oslo_concurrency.lockutils [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] Acquiring lock "refresh_cache-3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 601.587812] env[61868]: DEBUG oslo_concurrency.lockutils [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] Acquired lock "refresh_cache-3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 601.587812] env[61868]: DEBUG nova.network.neutron [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Refreshing network info cache for port af761ca4-c9e4-4908-9053-119f8438c1a2 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 602.710668] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Successfully created port: 7ef38466-6323-45ab-a015-6bf146d59bbd {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.041333] env[61868]: DEBUG nova.network.neutron [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Updated VIF entry in instance network info cache for port af761ca4-c9e4-4908-9053-119f8438c1a2. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 603.041764] env[61868]: DEBUG nova.network.neutron [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Updating instance_info_cache with network_info: [{"id": "af761ca4-c9e4-4908-9053-119f8438c1a2", "address": "fa:16:3e:07:69:7c", "network": {"id": "237c7a76-9a1b-406d-9000-6d329d4e2639", "bridge": "br-int", "label": "tempest-ServersTestJSON-455559827-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "22e890ee8f954d0aa38216c48816a071", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf761ca4-c9", "ovs_interfaceid": "af761ca4-c9e4-4908-9053-119f8438c1a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.059822] env[61868]: DEBUG oslo_concurrency.lockutils [req-2fb097f8-78d8-4806-ac87-045f3d156931 req-9fc56ac5-7213-40c9-bf2d-56cb1c040151 service nova] Releasing lock "refresh_cache-3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 603.475040] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 603.475261] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 603.517421] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 603.595683] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 603.595930] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 603.597542] env[61868]: INFO nova.compute.claims [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.873742] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e17274-e9d3-4f2d-93b5-8bd9eabfc88c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.882075] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547498eb-adc7-4ad0-b4a1-9f5c0a00df16 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.922914] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66840bbb-1ba9-4f97-86da-0ca472d4837e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.937384] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89974717-cc74-436a-b160-95ab15416c26 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.954835] env[61868]: DEBUG nova.compute.provider_tree [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.964940] env[61868]: DEBUG nova.scheduler.client.report [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 603.983029] 
env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.387s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 603.983809] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 604.025658] env[61868]: DEBUG nova.compute.utils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 604.028441] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 604.028929] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 604.048588] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 604.179281] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 604.361301] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 604.361301] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 604.361301] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.362795] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 604.362795] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.362795] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 604.362795] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 604.362795] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 604.363289] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 604.363289] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 604.363289] env[61868]: DEBUG nova.virt.hardware [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 604.363289] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7ef233-47e0-48f3-b280-660179e45dd8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.373449] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a1b877-672b-4442-9ddb-ef5447aa4d7a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.519232] env[61868]: DEBUG nova.policy [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '292b5aeb0ebb487ca46e9f61f2bbc577', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2060ef1ed49e4b789d28e03a235d3369', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 606.827706] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Successfully created port: 962e2556-54e1-4677-bf83-97889db7f5cc {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.929027] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Successfully updated port: 7ef38466-6323-45ab-a015-6bf146d59bbd {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 606.943454] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "refresh_cache-4cfa680a-0ea3-4c40-b89c-b6067397427a" {{(pid=61868) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 606.943604] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquired lock "refresh_cache-4cfa680a-0ea3-4c40-b89c-b6067397427a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 606.943767] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 607.094778] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 607.578626] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Updating instance_info_cache with network_info: [{"id": "7ef38466-6323-45ab-a015-6bf146d59bbd", "address": "fa:16:3e:83:6b:f3", "network": {"id": "15cf4fd7-30c4-4004-a6a0-4c4939ff4c4f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-197546246-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "8fe44517f4444d5e8752f9adcae734ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ef38466-63", "ovs_interfaceid": "7ef38466-6323-45ab-a015-6bf146d59bbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.600814] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Releasing lock "refresh_cache-4cfa680a-0ea3-4c40-b89c-b6067397427a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 607.601112] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Instance network_info: |[{"id": "7ef38466-6323-45ab-a015-6bf146d59bbd", "address": "fa:16:3e:83:6b:f3", "network": {"id": "15cf4fd7-30c4-4004-a6a0-4c4939ff4c4f", "bridge": "br-int", 
"label": "tempest-ServerActionsTestOtherA-197546246-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "8fe44517f4444d5e8752f9adcae734ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ef38466-63", "ovs_interfaceid": "7ef38466-6323-45ab-a015-6bf146d59bbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 607.601533] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:6b:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ef38466-6323-45ab-a015-6bf146d59bbd', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.611499] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Creating folder: Project (8fe44517f4444d5e8752f9adcae734ea). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 607.612389] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-506d88c2-2e39-44e7-b914-d5926de1917e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.625829] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Created folder: Project (8fe44517f4444d5e8752f9adcae734ea) in parent group-v18181. [ 607.626032] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Creating folder: Instances. Parent ref: group-v18206. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 607.626283] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9aa36bdc-a4f3-423f-9b06-0f124169428a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.636215] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Created folder: Instances in parent group-v18206. 
[ 607.639427] env[61868]: DEBUG oslo.service.loopingcall [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.639427] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 607.639427] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a1c3519-55f8-4b39-9a0a-71e9e66d7ddc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.663340] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.663340] env[61868]: value = "task-40925" [ 607.663340] env[61868]: _type = "Task" [ 607.663340] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.674644] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40925, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.927035] env[61868]: DEBUG nova.compute.manager [req-858b7697-61e8-48d0-afd8-770e6eb195c4 req-acc42dec-5537-44bf-a246-d476428c3301 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Received event network-vif-plugged-7ef38466-6323-45ab-a015-6bf146d59bbd {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 607.927336] env[61868]: DEBUG oslo_concurrency.lockutils [req-858b7697-61e8-48d0-afd8-770e6eb195c4 req-acc42dec-5537-44bf-a246-d476428c3301 service nova] Acquiring lock "4cfa680a-0ea3-4c40-b89c-b6067397427a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 607.927474] env[61868]: DEBUG oslo_concurrency.lockutils [req-858b7697-61e8-48d0-afd8-770e6eb195c4 req-acc42dec-5537-44bf-a246-d476428c3301 service nova] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 607.927647] env[61868]: DEBUG oslo_concurrency.lockutils [req-858b7697-61e8-48d0-afd8-770e6eb195c4 req-acc42dec-5537-44bf-a246-d476428c3301 service nova] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 607.927833] env[61868]: DEBUG nova.compute.manager [req-858b7697-61e8-48d0-afd8-770e6eb195c4 req-acc42dec-5537-44bf-a246-d476428c3301 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] No waiting events found dispatching network-vif-plugged-7ef38466-6323-45ab-a015-6bf146d59bbd {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 607.928037] env[61868]: WARNING nova.compute.manager [req-858b7697-61e8-48d0-afd8-770e6eb195c4 req-acc42dec-5537-44bf-a246-d476428c3301 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Received unexpected 
[ 608.009066] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 608.009353] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 608.173465] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40925, 'name': CreateVM_Task, 'duration_secs': 0.334357} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 608.173647] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 608.174251] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 608.174479] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 608.177305] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32b1c1a-4201-4e72-a7fd-df68c50cb974 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 608.210601] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Reconfiguring VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}}
[ 608.211092] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18d4a063-7e78-401b-ba3e-aa109fe1598b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 608.228013] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Waiting for the task: (returnval){
[ 608.228013] env[61868]: value = "task-40926"
[ 608.228013] env[61868]: _type = "Task"
[ 608.228013] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 608.237902] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Task: {'id': task-40926, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 608.522122] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Successfully updated port: 962e2556-54e1-4677-bf83-97889db7f5cc {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 608.537043] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "refresh_cache-ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 608.537043] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquired lock "refresh_cache-ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 608.537043] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}}
[ 608.592019] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}}
[ 608.738906] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Task: {'id': task-40926, 'name': ReconfigVM_Task, 'duration_secs': 0.114537} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 608.739232] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Reconfigured VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}}
[ 608.739497] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 608.739803] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 608.740105] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 608.740428] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}}
[ 608.740700] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0998e0d0-988b-4cf2-8cd6-a3999ab029a6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 608.745967] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Waiting for the task: (returnval){
[ 608.745967] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52e10a2f-4004-dae0-471e-2c97324ae177"
[ 608.745967] env[61868]: _type = "Task"
[ 608.745967] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 608.756761] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52e10a2f-4004-dae0-471e-2c97324ae177, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
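
The lockutils entries follow one pattern throughout this log: a named lock is requested ("Acquiring"), granted ("acquired ... waited"), and given back (""released" ... held"), whether the name is a per-instance UUID guarding _locked_do_build_and_run_instance or a global name like "vmware.get_and_set_vnc_port" that serializes VNC port selection. A minimal sketch of that pattern with oslo.concurrency (helper names are hypothetical; this is not Nova's actual code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('vmware.get_and_set_vnc_port')
    def get_and_set_vnc_port(session, vm_ref):
        # Only one greenthread at a time may pick a free port and
        # reconfigure its VM, which is why ports 5908 and 5909 are
        # handed out strictly one after the other in this log.
        port = find_free_vnc_port(session)          # hypothetical helper
        reconfig_vm_for_vnc(session, vm_ref, port)  # hypothetical helper
        return port
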
[ 609.128924] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Updating instance_info_cache with network_info: [{"id": "962e2556-54e1-4677-bf83-97889db7f5cc", "address": "fa:16:3e:9d:ae:f5", "network": {"id": "66ef860b-d0b2-47a0-acfa-19f4014921b1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-372785353-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "2060ef1ed49e4b789d28e03a235d3369", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap962e2556-54", "ovs_interfaceid": "962e2556-54e1-4677-bf83-97889db7f5cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 609.166621] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Releasing lock "refresh_cache-ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 609.166621] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Instance network_info: |[{"id": "962e2556-54e1-4677-bf83-97889db7f5cc", "address": "fa:16:3e:9d:ae:f5", "network": {"id": "66ef860b-d0b2-47a0-acfa-19f4014921b1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-372785353-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "2060ef1ed49e4b789d28e03a235d3369", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap962e2556-54", "ovs_interfaceid": "962e2556-54e1-4677-bf83-97889db7f5cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 609.166747] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:ae:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '962e2556-54e1-4677-bf83-97889db7f5cc', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 609.174471] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Creating folder: Project (2060ef1ed49e4b789d28e03a235d3369). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 609.175166] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e8a6c71-b977-4d68-8367-0cff2c4b0fd9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.202136] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Created folder: Project (2060ef1ed49e4b789d28e03a235d3369) in parent group-v18181.
[ 609.202136] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Creating folder: Instances. Parent ref: group-v18209. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 609.202136] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35a4eccc-7fda-4577-98c1-149bc4aae9aa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.202136] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Created folder: Instances in parent group-v18209.
[ 609.202136] env[61868]: DEBUG oslo.service.loopingcall [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
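
The instance_info_cache payloads above are plain JSON lists of VIF dicts. A small helper that pulls out each port's MAC and fixed IPs, assuming only the structure visible in this log:

    import json

    def summarize_network_info(network_info_json):
        """Map each VIF to its port id, MAC address and fixed IPs."""
        return [
            {
                'port_id': vif['id'],
                'mac': vif['address'],
                'ips': [ip['address']
                        for subnet in vif['network']['subnets']
                        for ip in subnet['ips']],
            }
            for vif in json.loads(network_info_json)
        ]

    # For the cache entry above this would yield:
    # [{'port_id': '962e2556-...', 'mac': 'fa:16:3e:9d:ae:f5',
    #   'ips': ['10.0.0.10']}]
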
[ 609.202136] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 609.202340] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87d588c3-96f0-44a9-93fd-b6ce4862ff4c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.226394] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 609.226394] env[61868]: value = "task-40929"
[ 609.226394] env[61868]: _type = "Task"
[ 609.226394] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 609.244247] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40929, 'name': CreateVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 609.271026] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 609.271371] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 609.271590] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 609.581562] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "2466fe4e-2589-4417-a63a-4d8bc695109d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 609.581814] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 609.739414] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40929, 'name': CreateVM_Task, 'duration_secs': 0.342208} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 609.739611] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 609.740452] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 609.740680] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 609.744914] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9fd0e2-87d7-41b5-a2cb-ec4c795a1f1d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.800204] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Reconfiguring VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}}
[ 609.801613] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b475017b-1659-48e3-9be5-5501db96e447 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.830240] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Waiting for the task: (returnval){
[ 609.830240] env[61868]: value = "task-40930"
[ 609.830240] env[61868]: _type = "Task"
[ 609.830240] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 609.843998] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Task: {'id': task-40930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 610.340855] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Task: {'id': task-40930, 'name': ReconfigVM_Task, 'duration_secs': 0.142311} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 610.341240] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Reconfigured VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}}
[ 610.341368] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.601s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 610.341614] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 610.341764] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 610.342081] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}}
[ 610.342364] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60dcccad-0526-4cec-abd6-f3d71a6a4664 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.348565] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Waiting for the task: (returnval){
[ 610.348565] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]523d1955-73ae-db8f-10ac-320f271f7694"
[ 610.348565] env[61868]: _type = "Task"
[ 610.348565] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 610.357880] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]523d1955-73ae-db8f-10ac-320f271f7694, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
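
The "Processing image 790b1826-..." sequences show the image-cache discipline at work: a lock (plus an external semaphore) on the cached-image directory is held while the datastore is searched, and a narrower lock on the concrete .vmdk is taken before any download or copy. A schematic version under those assumptions, with hypothetical datastore helpers (not the real _fetch_image_if_missing):

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(session, image_id, datastore='datastore2'):
        cache_dir = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        vmdk = '%s/%s.vmdk' % (cache_dir, image_id)
        # Serialize on the .vmdk path so two builds of the same image
        # never fetch or convert it concurrently.
        with lockutils.lock(vmdk):
            if not datastore_file_exists(session, vmdk):          # hypothetical
                download_image_to_cache(session, image_id, vmdk)  # hypothetical
        return vmdk
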
[ 610.858738] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 610.858945] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 610.859156] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 610.880073] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "1df194bf-fa9b-4d03-9b20-8478147de566" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 610.880302] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "1df194bf-fa9b-4d03-9b20-8478147de566" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 611.566379] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 611.566786] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 611.915337] env[61868]: DEBUG oslo_concurrency.lockutils [None req-395ee023-b17e-4a59-b830-7a86389c0fc3 tempest-ServerDiagnosticsNegativeTest-262233249 tempest-ServerDiagnosticsNegativeTest-262233249-project-member] Acquiring lock "24d274d6-04c7-4f4a-941b-31c539054dc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 611.915594] env[61868]: DEBUG oslo_concurrency.lockutils [None req-395ee023-b17e-4a59-b830-7a86389c0fc3 tempest-ServerDiagnosticsNegativeTest-262233249 tempest-ServerDiagnosticsNegativeTest-262233249-project-member] Lock "24d274d6-04c7-4f4a-941b-31c539054dc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 612.039020] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5472dfff-c8d5-41da-9320-d3ced8109709 tempest-ServersWithSpecificFlavorTestJSON-818828595 tempest-ServersWithSpecificFlavorTestJSON-818828595-project-member] Acquiring lock "0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 612.039245] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5472dfff-c8d5-41da-9320-d3ced8109709 tempest-ServersWithSpecificFlavorTestJSON-818828595 tempest-ServersWithSpecificFlavorTestJSON-818828595-project-member] Lock "0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 612.992911] env[61868]: DEBUG nova.compute.manager [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Received event network-changed-7ef38466-6323-45ab-a015-6bf146d59bbd {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 612.993233] env[61868]: DEBUG nova.compute.manager [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Refreshing instance network info cache due to event network-changed-7ef38466-6323-45ab-a015-6bf146d59bbd. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 612.994725] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Acquiring lock "refresh_cache-4cfa680a-0ea3-4c40-b89c-b6067397427a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 612.994725] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Acquired lock "refresh_cache-4cfa680a-0ea3-4c40-b89c-b6067397427a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 612.994725] env[61868]: DEBUG nova.network.neutron [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Refreshing network info cache for port 7ef38466-6323-45ab-a015-6bf146d59bbd {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}}
[ 613.425018] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e0f96591-7ddd-46a5-b530-a359f1da089b tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "a19a14cd-b92d-4b3d-a06a-75b59333d1af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 613.425621] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e0f96591-7ddd-46a5-b530-a359f1da089b tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "a19a14cd-b92d-4b3d-a06a-75b59333d1af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 613.569114] env[61868]: DEBUG nova.network.neutron [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Updated VIF entry in instance network info cache for port 7ef38466-6323-45ab-a015-6bf146d59bbd. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}}
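
The network-vif-plugged / network-changed entries record Neutron's external events arriving at the compute manager: any waiter registered for the (instance, event) pair is popped under a per-instance "<uuid>-events" lock, a network-changed event triggers a cache refresh, and an event nobody was waiting for produces the WARNING seen above. Roughly (an illustration of the shape of that plumbing, not Nova's actual code):

    from oslo_concurrency import lockutils
    from oslo_log import log as logging

    LOG = logging.getLogger(__name__)
    _waiters = {}  # (instance_uuid, event_key) -> eventlet Event

    def pop_instance_event(instance_uuid, event_key):
        with lockutils.lock('%s-events' % instance_uuid):
            return _waiters.pop((instance_uuid, event_key), None)

    def external_instance_event(instance_uuid, event_key):
        waiter = pop_instance_event(instance_uuid, event_key)
        if waiter is None:
            LOG.warning('Received unexpected event %s for instance %s',
                        event_key, instance_uuid)
        else:
            waiter.send(event_key)  # wake the spawning thread
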
[ 613.569527] env[61868]: DEBUG nova.network.neutron [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Updating instance_info_cache with network_info: [{"id": "7ef38466-6323-45ab-a015-6bf146d59bbd", "address": "fa:16:3e:83:6b:f3", "network": {"id": "15cf4fd7-30c4-4004-a6a0-4c4939ff4c4f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-197546246-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "8fe44517f4444d5e8752f9adcae734ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ef38466-63", "ovs_interfaceid": "7ef38466-6323-45ab-a015-6bf146d59bbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 613.579375] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Releasing lock "refresh_cache-4cfa680a-0ea3-4c40-b89c-b6067397427a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 613.579610] env[61868]: DEBUG nova.compute.manager [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Received event network-vif-plugged-962e2556-54e1-4677-bf83-97889db7f5cc {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 613.579794] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Acquiring lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 613.579983] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 613.580179] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 613.580340] env[61868]: DEBUG nova.compute.manager [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] No waiting events found dispatching network-vif-plugged-962e2556-54e1-4677-bf83-97889db7f5cc {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 613.580541] env[61868]: WARNING nova.compute.manager [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Received unexpected event network-vif-plugged-962e2556-54e1-4677-bf83-97889db7f5cc for instance with vm_state building and task_state spawning.
[ 613.580726] env[61868]: DEBUG nova.compute.manager [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Received event network-changed-962e2556-54e1-4677-bf83-97889db7f5cc {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 613.580877] env[61868]: DEBUG nova.compute.manager [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Refreshing instance network info cache due to event network-changed-962e2556-54e1-4677-bf83-97889db7f5cc. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 613.581057] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Acquiring lock "refresh_cache-ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 613.581187] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Acquired lock "refresh_cache-ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 613.581337] env[61868]: DEBUG nova.network.neutron [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Refreshing network info cache for port 962e2556-54e1-4677-bf83-97889db7f5cc {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}}
[ 614.162577] env[61868]: DEBUG nova.network.neutron [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Updated VIF entry in instance network info cache for port 962e2556-54e1-4677-bf83-97889db7f5cc. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}}
[ 614.162934] env[61868]: DEBUG nova.network.neutron [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Updating instance_info_cache with network_info: [{"id": "962e2556-54e1-4677-bf83-97889db7f5cc", "address": "fa:16:3e:9d:ae:f5", "network": {"id": "66ef860b-d0b2-47a0-acfa-19f4014921b1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-372785353-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "2060ef1ed49e4b789d28e03a235d3369", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap962e2556-54", "ovs_interfaceid": "962e2556-54e1-4677-bf83-97889db7f5cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 614.176682] env[61868]: DEBUG oslo_concurrency.lockutils [req-7b0e6dd2-cb56-4bf2-88bb-a5cacea381a9 req-008a0b29-c923-4965-ac93-d0730fbe8731 service nova] Releasing lock "refresh_cache-ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 616.805612] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a3ccf20-aa51-4cab-a664-b563a9fc7ddc tempest-FloatingIPsAssociationTestJSON-873127927 tempest-FloatingIPsAssociationTestJSON-873127927-project-member] Acquiring lock "46a45c39-ab23-4918-9d7e-84093c2b7ce8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 616.805937] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a3ccf20-aa51-4cab-a664-b563a9fc7ddc tempest-FloatingIPsAssociationTestJSON-873127927 tempest-FloatingIPsAssociationTestJSON-873127927-project-member] Lock "46a45c39-ab23-4918-9d7e-84093c2b7ce8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 618.124465] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d5f069d2-e302-47af-9f35-877fffa7d02d tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] Acquiring lock "ac5d7c83-1d3c-46b8-af48-4ec6c57ad070" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 618.124787] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d5f069d2-e302-47af-9f35-877fffa7d02d tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] Lock "ac5d7c83-1d3c-46b8-af48-4ec6c57ad070" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 619.375399] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5286c1cd-9f99-47a2-876f-aef21f283c0a tempest-ServerDiagnosticsV248Test-873728587 tempest-ServerDiagnosticsV248Test-873728587-project-member] Acquiring lock "9cb259e7-5102-4610-92ba-f30bbbcdbd9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 619.375668] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5286c1cd-9f99-47a2-876f-aef21f283c0a tempest-ServerDiagnosticsV248Test-873728587 tempest-ServerDiagnosticsV248Test-873728587-project-member] Lock "9cb259e7-5102-4610-92ba-f30bbbcdbd9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 620.714090] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0ba98e26-1c67-411c-9ced-6ae7de9f92b5 tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] Acquiring lock "32ef9425-a629-4284-81d2-0dfa848e4420" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 620.714381] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0ba98e26-1c67-411c-9ced-6ae7de9f92b5 tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] Lock "32ef9425-a629-4284-81d2-0dfa848e4420" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 625.393020] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54f023d9-dbb2-4d6d-b699-302b19f4af18 tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] Acquiring lock "7b2e8196-5ec1-4cef-9183-2d4f43639a47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 625.393419] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54f023d9-dbb2-4d6d-b699-302b19f4af18 tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] Lock "7b2e8196-5ec1-4cef-9183-2d4f43639a47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 627.265051] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c98c0f0d-fa8f-4cf9-bf7a-9b3c044c1601 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] Acquiring lock "a9f648e5-c026-4bcf-a4b4-81cfbc5532a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 627.265361] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c98c0f0d-fa8f-4cf9-bf7a-9b3c044c1601 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] Lock "a9f648e5-c026-4bcf-a4b4-81cfbc5532a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 627.838160] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aa931c47-f04b-4254-9c88-7bb57e56c6c5 tempest-ServerMetadataNegativeTestJSON-1980073815 tempest-ServerMetadataNegativeTestJSON-1980073815-project-member] Acquiring lock "7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 627.838395] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aa931c47-f04b-4254-9c88-7bb57e56c6c5 tempest-ServerMetadataNegativeTestJSON-1980073815 tempest-ServerMetadataNegativeTestJSON-1980073815-project-member] Lock "7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 630.614139] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1e9e2edd-7dd7-45cb-bf19-a0476ea30251 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "80be1533-08c9-4a44-975b-90ed5ac5402a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 630.614139] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1e9e2edd-7dd7-45cb-bf19-a0476ea30251 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "80be1533-08c9-4a44-975b-90ed5ac5402a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 632.425587] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4014b24a-6e17-4676-883e-f97ef382734d tempest-ServersTestFqdnHostnames-466634136 tempest-ServersTestFqdnHostnames-466634136-project-member] Acquiring lock "727c1245-a258-4b71-93bf-10977d80b3ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 632.426054] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4014b24a-6e17-4676-883e-f97ef382734d tempest-ServersTestFqdnHostnames-466634136 tempest-ServersTestFqdnHostnames-466634136-project-member] Lock "727c1245-a258-4b71-93bf-10977d80b3ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 635.651044] env[61868]: WARNING oslo_vmware.rw_handles [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles response.begin()
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 635.651044] env[61868]: ERROR oslo_vmware.rw_handles
[ 635.651703] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 635.654148] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 635.654400] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Copying Virtual Disk [datastore2] vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/bd815e09-39cf-4898-9448-79bb3e876fa5/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 635.654681] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-857cae93-e1cf-4c84-893e-ff6feda4a3a8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 635.667036] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Waiting for the task: (returnval){
[ 635.667036] env[61868]: value = "task-40931"
[ 635.667036] env[61868]: _type = "Task"
[ 635.667036] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 635.679725] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': task-40931, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 636.188051] env[61868]: DEBUG oslo_vmware.exceptions [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 636.190646] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 636.193444] env[61868]: ERROR nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 636.193444] env[61868]: Faults: ['InvalidArgument']
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Traceback (most recent call last):
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] yield resources
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self.driver.spawn(context, instance, image_meta,
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self._fetch_image_if_missing(context, vi)
[ 636.193444] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] image_cache(vi, tmp_image_ds_loc)
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] vm_util.copy_virtual_disk(
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] session._wait_for_task(vmdk_copy_task)
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] return self.wait_for_task(task_ref)
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] return evt.wait()
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] result = hub.switch()
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 636.193993] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] return self.greenlet.switch()
[ 636.194440] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 636.194440] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self.f(*self.args, **self.kw)
[ 636.194440] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 636.194440] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] raise exceptions.translate_fault(task_info.error)
[ 636.194440] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 636.194440] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Faults: ['InvalidArgument']
[ 636.194440] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0]
[ 636.194440] env[61868]: INFO nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Terminating instance
[ 636.203276] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 636.203276] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 636.203276] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f4c5b3d-e966-407f-9d6d-7877403866d6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.205974] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "refresh_cache-75fcbf2e-6097-42b1-9857-be7aecb9b7c0" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 636.205974] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquired lock "refresh_cache-75fcbf2e-6097-42b1-9857-be7aecb9b7c0" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 636.205974] env[61868]: DEBUG nova.network.neutron [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 636.217127] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 636.217378] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 636.218352] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd40054e-75cb-4b94-83df-3d79247e2ca8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.224477] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Waiting for the task: (returnval){ [ 636.224477] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]524ba500-a90b-a735-2868-074fdc29b840" [ 636.224477] env[61868]: _type = "Task" [ 636.224477] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.233635] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]524ba500-a90b-a735-2868-074fdc29b840, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.246864] env[61868]: DEBUG nova.network.neutron [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 636.306800] env[61868]: DEBUG nova.network.neutron [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.326621] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Releasing lock "refresh_cache-75fcbf2e-6097-42b1-9857-be7aecb9b7c0" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 636.327348] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 636.327348] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 636.330358] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a77cd1-e6ec-437b-a38a-0f90ef8307c2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.337937] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 636.338252] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-730283f5-7dd5-410c-bc08-c0d75f73e5a1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.382615] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 636.382615] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 636.382615] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Deleting the datastore file [datastore2] 75fcbf2e-6097-42b1-9857-be7aecb9b7c0 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 636.382615] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6031c98a-6a10-42ac-a6fc-b2cd3a764d6f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.387886] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Waiting for the task: (returnval){ [ 636.387886] env[61868]: value = "task-40933" [ 636.387886] env[61868]: _type = "Task" [ 636.387886] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.398848] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': task-40933, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.739764] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 636.740250] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Creating directory with path [datastore2] vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 636.740651] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e65e8fcd-d7ee-4ef9-9864-a84d4b06662e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.757797] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Created directory with path [datastore2] vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 636.758333] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Fetch image to [datastore2] vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 636.760540] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 636.761831] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b702c134-00c1-4fa9-a4f9-9de0f5b42726 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.772846] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6339291a-d55d-4183-8c02-053d5efd394b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.784059] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af0ee68-137e-4f1b-8dcd-e091f1f77c40 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.817797] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2183b96-eddc-47aa-9367-fcd5da7a0b6c {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.825678] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-abbc2b6d-dcc1-406c-a5bb-217ee049760d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.898229] env[61868]: DEBUG oslo_vmware.api [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Task: {'id': task-40933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037312} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.898670] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 636.898943] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 636.899180] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 636.899661] env[61868]: INFO nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Took 0.57 seconds to destroy the instance on the hypervisor. [ 636.899984] env[61868]: DEBUG oslo.service.loopingcall [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.900276] env[61868]: DEBUG nova.compute.manager [-] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 636.903343] env[61868]: DEBUG nova.compute.claims [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 636.903981] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 636.903981] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 636.922928] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 637.001247] env[61868]: DEBUG oslo_vmware.rw_handles [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 637.066092] env[61868]: DEBUG oslo_vmware.rw_handles [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 637.066092] env[61868]: DEBUG oslo_vmware.rw_handles [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 637.402607] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb032e7a-28a2-42f3-9485-896c5d0ccd6d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.410398] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66635449-9762-4da6-8b1b-e43215943e23 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.443628] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c90107-224c-4110-8dcc-ad28a3e4e4a7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.450897] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba51ad9-310d-482f-9ddf-70673352bbc9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.467947] env[61868]: DEBUG nova.compute.provider_tree [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.477661] env[61868]: DEBUG nova.scheduler.client.report [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 637.500454] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.596s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 637.500929] env[61868]: ERROR nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 637.500929] env[61868]: Faults: ['InvalidArgument'] [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Traceback (most recent call last): [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 637.500929] env[61868]: ERROR 
nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self.driver.spawn(context, instance, image_meta, [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self._fetch_image_if_missing(context, vi) [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] image_cache(vi, tmp_image_ds_loc) [ 637.500929] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] vm_util.copy_virtual_disk( [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] session._wait_for_task(vmdk_copy_task) [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] return self.wait_for_task(task_ref) [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] return evt.wait() [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] result = hub.switch() [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] return self.greenlet.switch() [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 637.501349] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] self.f(*self.args, **self.kw) [ 637.501802] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 637.501802] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] raise exceptions.translate_fault(task_info.error) [ 637.501802] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 637.501802] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Faults: ['InvalidArgument'] [ 637.501802] env[61868]: ERROR nova.compute.manager [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] [ 637.501802] env[61868]: DEBUG nova.compute.utils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 637.503490] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Build of instance 75fcbf2e-6097-42b1-9857-be7aecb9b7c0 was re-scheduled: A specified parameter was not correct: fileType [ 637.503490] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 637.503915] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 637.504492] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquiring lock "refresh_cache-75fcbf2e-6097-42b1-9857-be7aecb9b7c0" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 637.504661] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Acquired lock "refresh_cache-75fcbf2e-6097-42b1-9857-be7aecb9b7c0" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 637.504824] env[61868]: DEBUG nova.network.neutron [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 637.544561] env[61868]: DEBUG nova.network.neutron [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 637.602181] env[61868]: DEBUG nova.network.neutron [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.611044] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Releasing lock "refresh_cache-75fcbf2e-6097-42b1-9857-be7aecb9b7c0" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 637.611277] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 637.611455] env[61868]: DEBUG nova.compute.manager [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] [instance: 75fcbf2e-6097-42b1-9857-be7aecb9b7c0] Skipping network deallocation for instance since networking was not requested. {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 637.744753] env[61868]: INFO nova.scheduler.client.report [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Deleted allocations for instance 75fcbf2e-6097-42b1-9857-be7aecb9b7c0 [ 637.772991] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2289a208-a096-4924-acdd-181e2cbc48ec tempest-AutoAllocateNetworkTest-321347917 tempest-AutoAllocateNetworkTest-321347917-project-member] Lock "75fcbf2e-6097-42b1-9857-be7aecb9b7c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 53.965s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 637.813567] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 637.887485] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 637.887765] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 637.890817] env[61868]: INFO nova.compute.claims [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.014700] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "972ab1c7-03b0-4294-930c-8084674083ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 638.014932] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "972ab1c7-03b0-4294-930c-8084674083ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 638.457617] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d98d5c1d-1870-4531-98e2-6c0ae2d7f177 tempest-ImagesOneServerNegativeTestJSON-914211067 tempest-ImagesOneServerNegativeTestJSON-914211067-project-member] Acquiring lock "18918990-c7b2-40b4-9683-ae0635fcc367" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 638.457852] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d98d5c1d-1870-4531-98e2-6c0ae2d7f177 tempest-ImagesOneServerNegativeTestJSON-914211067 tempest-ImagesOneServerNegativeTestJSON-914211067-project-member] Lock "18918990-c7b2-40b4-9683-ae0635fcc367" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 638.478683] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c392ebe-9a50-48e1-9e90-8afb7b632258 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.486981] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6df89a-d567-4e44-81d8-096b694c0437 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.520247] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acde226-604a-44ef-8992-fb17a94f1c00 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.528365] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8e643f-53ec-4258-aec2-b58508f788d8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.542470] env[61868]: DEBUG nova.compute.provider_tree [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.551477] env[61868]: DEBUG nova.scheduler.client.report [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 638.569153] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.681s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 638.570063] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 638.618215] env[61868]: DEBUG nova.compute.utils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.619527] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Allocating IP information in the background. 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 638.619759] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 638.635694] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 638.671768] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.683441] env[61868]: DEBUG nova.policy [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91b3a84ec5c48d896a5bf3d8c568343', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9eabc0f9c1604e90b373219843edfc8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 638.704575] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.726257] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 638.748128] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 638.748481] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 638.748543] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.748777] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 638.748880] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.749045] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 638.749271] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 638.749416] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 638.749573] env[61868]: DEBUG 
nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 638.749734] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 638.749975] env[61868]: DEBUG nova.virt.hardware [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 638.750960] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176a178b-1f31-4d39-ae61-8dec5c7dcb42 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.759784] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54179584-7763-4622-b594-911f5b105add {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.201064] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Successfully created port: f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.352389] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.352389] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.352389] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 639.963199] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Successfully updated port: f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 639.980069] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "refresh_cache-4ed52e2d-018f-4405-9380-0c7f62ef2db3" {{(pid=61868) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 639.980069] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "refresh_cache-4ed52e2d-018f-4405-9380-0c7f62ef2db3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 639.980069] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 640.024542] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 640.338353] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Updating instance_info_cache with network_info: [{"id": "f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0", "address": "fa:16:3e:88:65:8a", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9d6cf52-bb", "ovs_interfaceid": "f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.346684] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.351889] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.352165] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9932}} [ 640.352290] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 640.353725] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "refresh_cache-4ed52e2d-018f-4405-9380-0c7f62ef2db3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 640.354093] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Instance network_info: |[{"id": "f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0", "address": "fa:16:3e:88:65:8a", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9d6cf52-bb", "ovs_interfaceid": "f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 640.354802] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:65:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.366436] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating folder: Project (9eabc0f9c1604e90b373219843edfc8d). Parent ref: group-v18181. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 640.367217] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f3bba34-e567-462a-8cda-347c5907220d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.377291] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.377950] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.378168] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.378354] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.378866] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.379126] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.379246] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.379439] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.379618] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.379795] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 640.380278] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 640.382576] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.382978] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created folder: Project (9eabc0f9c1604e90b373219843edfc8d) in parent group-v18181. [ 640.383273] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating folder: Instances. Parent ref: group-v18212. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 640.383787] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.384053] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 640.384301] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-225597b0-5589-4681-ac14-a77eb9b109ef {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.386204] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.396081] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created folder: Instances in parent group-v18212. [ 640.396360] env[61868]: DEBUG oslo.service.loopingcall [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.396627] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 640.396918] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c6c30d6-22db-4101-98f7-e66b3821435c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.414239] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 640.414965] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 640.414965] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 640.414965] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 640.417046] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b76ffaa-fdfa-43dd-b7f4-c0829ede5921 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.426461] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.426461] env[61868]: value = "task-40936" [ 640.426461] env[61868]: _type = "Task" [ 640.426461] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.435129] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8548984-f905-44ec-9acf-ecd34fbf7aaa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.444860] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40936, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.458331] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681f8740-9936-425f-8a91-043740d1dd3a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.468486] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e3f1c3-f503-4046-9e2f-f82bb6730c24 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.508187] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181877MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 640.508435] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 640.508600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 640.607778] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7b89d270-4000-484f-87dd-507335e5c7dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.608317] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 163a2904-1b18-4a83-9acf-6c9fe3ad511c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.608532] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b9c8dd4c-8a19-4ffb-8e57-b273c000f121 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.608724] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 05c290e0-e98f-4f66-9e2c-f7d21992bb88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.609083] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c1efc2dd-6474-4fba-a00e-f104f0d446de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.609251] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.609506] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.609855] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.610042] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.610192] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.621673] env[61868]: DEBUG nova.compute.manager [req-02401ff4-c4f9-4839-a3d7-37320c0c130b req-3ad0b5c3-40fd-48e3-a5e5-333cfea9b7d6 service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Received event network-vif-plugged-f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 640.621932] env[61868]: DEBUG oslo_concurrency.lockutils [req-02401ff4-c4f9-4839-a3d7-37320c0c130b req-3ad0b5c3-40fd-48e3-a5e5-333cfea9b7d6 service nova] Acquiring lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 640.622153] env[61868]: DEBUG oslo_concurrency.lockutils [req-02401ff4-c4f9-4839-a3d7-37320c0c130b req-3ad0b5c3-40fd-48e3-a5e5-333cfea9b7d6 service nova] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 640.622318] env[61868]: DEBUG oslo_concurrency.lockutils [req-02401ff4-c4f9-4839-a3d7-37320c0c130b req-3ad0b5c3-40fd-48e3-a5e5-333cfea9b7d6 service nova] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 640.622485] env[61868]: DEBUG nova.compute.manager [req-02401ff4-c4f9-4839-a3d7-37320c0c130b req-3ad0b5c3-40fd-48e3-a5e5-333cfea9b7d6 service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] No waiting events found dispatching network-vif-plugged-f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 640.622680] env[61868]: WARNING nova.compute.manager [req-02401ff4-c4f9-4839-a3d7-37320c0c130b req-3ad0b5c3-40fd-48e3-a5e5-333cfea9b7d6 service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Received unexpected event network-vif-plugged-f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0 for instance with vm_state building and task_state spawning. [ 640.651764] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.682214] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.712034] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.729797] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 24d274d6-04c7-4f4a-941b-31c539054dc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.749657] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.761041] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a19a14cd-b92d-4b3d-a06a-75b59333d1af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.781871] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 46a45c39-ab23-4918-9d7e-84093c2b7ce8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.801704] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ac5d7c83-1d3c-46b8-af48-4ec6c57ad070 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.818134] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 9cb259e7-5102-4610-92ba-f30bbbcdbd9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.857974] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 32ef9425-a629-4284-81d2-0dfa848e4420 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.870199] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7b2e8196-5ec1-4cef-9183-2d4f43639a47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.881374] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a9f648e5-c026-4bcf-a4b4-81cfbc5532a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.893706] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.914977] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 80be1533-08c9-4a44-975b-90ed5ac5402a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.940036] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40936, 'name': CreateVM_Task} progress is 99%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.944163] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 727c1245-a258-4b71-93bf-10977d80b3ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.955469] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.966974] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 18918990-c7b2-40b4-9683-ae0635fcc367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 640.967281] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 640.967373] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 641.403325] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9f40a3-ce38-474f-8043-8e92d5a22e2d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.411741] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8947d80f-fb93-40bd-83ab-9965166d07f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.446439] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b68919c-783c-4b58-88e3-cdd7dfd7a13c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.456578] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40936, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.458355] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee62ec0-9e95-43df-93f3-cc16d33a932c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.473303] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.482894] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 641.503968] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 641.504273] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.996s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 641.952282] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40936, 'name': CreateVM_Task, 'duration_secs': 1.40557} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.952466] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 641.953060] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 641.953289] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 641.956075] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f01b80-8088-4594-af58-62f366de89f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.996300] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 641.996884] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58dea187-1bf2-4b07-9690-968c16d974ba {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.013797] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 642.013797] env[61868]: value = "task-40937" [ 642.013797] env[61868]: _type = "Task" [ 642.013797] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.023838] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-40937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.524491] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-40937, 'name': ReconfigVM_Task, 'duration_secs': 0.174707} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.524970] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 642.525259] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.572s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 642.525619] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 642.525808] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 642.526303] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 642.526660] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ebf7644-45ad-48f9-b731-6210852894f8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.532379] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 642.532379] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]524dae4e-028b-d036-f4aa-a7fab1df0528" [ 642.532379] env[61868]: _type = "Task" [ 642.532379] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.542683] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]524dae4e-028b-d036-f4aa-a7fab1df0528, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.043517] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 643.043803] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.043978] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 643.115398] env[61868]: DEBUG nova.compute.manager [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Received event network-changed-f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 643.115603] env[61868]: DEBUG nova.compute.manager [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Refreshing instance network info cache due to event network-changed-f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 643.115852] env[61868]: DEBUG oslo_concurrency.lockutils [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] Acquiring lock "refresh_cache-4ed52e2d-018f-4405-9380-0c7f62ef2db3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 643.116059] env[61868]: DEBUG oslo_concurrency.lockutils [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] Acquired lock "refresh_cache-4ed52e2d-018f-4405-9380-0c7f62ef2db3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 643.116606] env[61868]: DEBUG nova.network.neutron [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Refreshing network info cache for port f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 643.586243] env[61868]: DEBUG nova.network.neutron [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Updated VIF entry in instance network info cache for port f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 643.586243] env[61868]: DEBUG nova.network.neutron [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Updating instance_info_cache with network_info: [{"id": "f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0", "address": "fa:16:3e:88:65:8a", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9d6cf52-bb", "ovs_interfaceid": "f9d6cf52-bbd4-4cbd-9f2f-39a4dadf3fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.596173] env[61868]: DEBUG oslo_concurrency.lockutils [req-2604505e-f350-47b4-ae35-e5e1b4077899 req-06980c87-552c-44d9-80d7-6d37eddb1a1a service nova] Releasing lock "refresh_cache-4ed52e2d-018f-4405-9380-0c7f62ef2db3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 652.200702] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a1dc6426-1e5e-48e4-af17-04227f7d0f9b tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] Acquiring lock "34945829-cc74-4bae-9af7-99504a130e2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 652.201116] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a1dc6426-1e5e-48e4-af17-04227f7d0f9b tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] Lock "34945829-cc74-4bae-9af7-99504a130e2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 655.091342] env[61868]: DEBUG oslo_concurrency.lockutils [None req-931bc777-7667-4ab3-b0f1-bd4defb8f838 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 655.091855] env[61868]: DEBUG oslo_concurrency.lockutils [None req-931bc777-7667-4ab3-b0f1-bd4defb8f838 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock 
"62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 657.115425] env[61868]: DEBUG oslo_concurrency.lockutils [None req-832b114b-a72a-48a9-b479-c761019aa7a5 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Acquiring lock "8cfd5561-08f1-49b8-b518-73104a987fc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 657.115888] env[61868]: DEBUG oslo_concurrency.lockutils [None req-832b114b-a72a-48a9-b479-c761019aa7a5 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Lock "8cfd5561-08f1-49b8-b518-73104a987fc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 664.832210] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa446331-1e47-4e1d-8415-cf56bca350b4 tempest-AttachInterfacesV270Test-169603213 tempest-AttachInterfacesV270Test-169603213-project-member] Acquiring lock "f4eb7096-af70-4a46-8e9d-2b94a185afcb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 664.832210] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa446331-1e47-4e1d-8415-cf56bca350b4 tempest-AttachInterfacesV270Test-169603213 tempest-AttachInterfacesV270Test-169603213-project-member] Lock "f4eb7096-af70-4a46-8e9d-2b94a185afcb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 684.629393] env[61868]: WARNING oslo_vmware.rw_handles [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 684.629393] env[61868]: ERROR oslo_vmware.rw_handles [ 684.630128] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 684.631585] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 684.631817] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Copying Virtual Disk [datastore2] vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/d42ce823-19dd-45d1-a674-5116c42d2de3/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 684.632162] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31f8407c-2660-491c-addc-3629f620b88d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.639464] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Waiting for the task: (returnval){ [ 684.639464] env[61868]: value = "task-40947" [ 684.639464] env[61868]: _type = "Task" [ 684.639464] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.648421] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Task: {'id': task-40947, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.150631] env[61868]: DEBUG oslo_vmware.exceptions [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 685.150968] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 685.151528] env[61868]: ERROR nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 685.151528] env[61868]: Faults: ['InvalidArgument'] [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Traceback (most recent call last): [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] yield resources [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] self.driver.spawn(context, instance, image_meta, [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] self._fetch_image_if_missing(context, vi) [ 685.151528] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] image_cache(vi, tmp_image_ds_loc) [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] vm_util.copy_virtual_disk( [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] session._wait_for_task(vmdk_copy_task) [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] return self.wait_for_task(task_ref) [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] return evt.wait() [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] result = hub.switch() [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 685.151855] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] return self.greenlet.switch() [ 685.152200] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 685.152200] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] self.f(*self.args, **self.kw) [ 685.152200] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 685.152200] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] raise exceptions.translate_fault(task_info.error) [ 685.152200] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 685.152200] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Faults: ['InvalidArgument'] [ 685.152200] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] [ 685.152200] env[61868]: INFO nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Terminating instance [ 685.154268] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 685.154268] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.154268] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock 
"refresh_cache-7b89d270-4000-484f-87dd-507335e5c7dc" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 685.154268] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquired lock "refresh_cache-7b89d270-4000-484f-87dd-507335e5c7dc" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 685.154468] env[61868]: DEBUG nova.network.neutron [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 685.155297] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-deaf7e2f-105c-45dc-993f-195dd254427b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.165155] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.165579] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 685.167932] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99cf3e5a-94d5-47c1-a1ba-0060ec535816 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.173379] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 685.173379] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52039366-39e9-6ac6-b14d-5396aba518aa" [ 685.173379] env[61868]: _type = "Task" [ 685.173379] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.181999] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52039366-39e9-6ac6-b14d-5396aba518aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.183460] env[61868]: DEBUG nova.network.neutron [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 685.210175] env[61868]: DEBUG nova.network.neutron [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.220555] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Releasing lock "refresh_cache-7b89d270-4000-484f-87dd-507335e5c7dc" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 685.220712] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 685.220921] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 685.221995] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ac7dec-a909-4814-8918-140739b6aed1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.232629] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 685.232874] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93faa6b9-df30-47ba-97cf-3ee63978db95 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.259608] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 685.259850] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 685.260043] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Deleting the datastore file [datastore2] 7b89d270-4000-484f-87dd-507335e5c7dc {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
685.260298] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aeb93a2a-6b3e-4cfa-9eaf-a3f89f2b3131 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.268022] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Waiting for the task: (returnval){ [ 685.268022] env[61868]: value = "task-40949" [ 685.268022] env[61868]: _type = "Task" [ 685.268022] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.276807] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Task: {'id': task-40949, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.683754] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 685.684085] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Creating directory with path [datastore2] vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.684367] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-685db01e-8113-47bc-bc75-80fe3fa2ef91 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.695752] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Created directory with path [datastore2] vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.695972] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Fetch image to [datastore2] vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 685.696169] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 685.696954] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acaba08-7789-431b-83fe-cb3e55b46d58 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.704421] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9d55f8-5ef4-42f8-8bf5-be3941700b01 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.714279] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3daad665-bce7-4d97-9921-a6f48caffd14 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.750184] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874fc8a2-72f6-46a2-9f17-0ba98b169bcc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.757179] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9d8ccdd9-bf64-4e0e-8cff-b916e634b73c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.778608] env[61868]: DEBUG oslo_vmware.api [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Task: {'id': task-40949, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041087} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.778948] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.779182] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 685.779397] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.779613] env[61868]: INFO nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Took 0.56 seconds to destroy the instance on the hypervisor. [ 685.779901] env[61868]: DEBUG oslo.service.loopingcall [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.780158] env[61868]: DEBUG nova.compute.manager [-] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Skipping network deallocation for instance since networking was not requested. {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 685.782951] env[61868]: DEBUG nova.compute.claims [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 685.783169] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 685.783423] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 685.848417] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 685.907719] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 685.968825] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 685.969020] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 686.298529] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6b4619-3095-4ac4-8a53-90cc348c8990 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.306477] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48603cb4-1109-4dc5-b424-8aa71a420c29 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.337194] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f90f4df-e86c-4ac7-93f3-d3bc2e896306 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.345216] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d5571a-d366-43d6-9d0c-e36652474d71 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.358346] env[61868]: DEBUG nova.compute.provider_tree [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.367603] env[61868]: DEBUG nova.scheduler.client.report [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 686.385288] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.602s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 686.385965] env[61868]: ERROR nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.385965] env[61868]: Faults: ['InvalidArgument'] [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Traceback (most recent call last): [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 
7b89d270-4000-484f-87dd-507335e5c7dc] self.driver.spawn(context, instance, image_meta, [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] self._fetch_image_if_missing(context, vi) [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] image_cache(vi, tmp_image_ds_loc) [ 686.385965] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] vm_util.copy_virtual_disk( [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] session._wait_for_task(vmdk_copy_task) [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] return self.wait_for_task(task_ref) [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] return evt.wait() [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] result = hub.switch() [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] return self.greenlet.switch() [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 686.386321] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] self.f(*self.args, **self.kw) [ 686.386715] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 686.386715] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] raise exceptions.translate_fault(task_info.error) [ 686.386715] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.386715] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Faults: ['InvalidArgument'] [ 686.386715] env[61868]: ERROR nova.compute.manager [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] [ 686.387452] env[61868]: DEBUG nova.compute.utils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 686.389002] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Build of instance 7b89d270-4000-484f-87dd-507335e5c7dc was re-scheduled: A specified parameter was not correct: fileType [ 686.389002] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 686.389392] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 686.389621] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquiring lock "refresh_cache-7b89d270-4000-484f-87dd-507335e5c7dc" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 686.389765] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Acquired lock "refresh_cache-7b89d270-4000-484f-87dd-507335e5c7dc" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 686.389925] env[61868]: DEBUG nova.network.neutron [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 686.418617] env[61868]: DEBUG nova.network.neutron [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 686.443134] env[61868]: DEBUG nova.network.neutron [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.452729] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Releasing lock "refresh_cache-7b89d270-4000-484f-87dd-507335e5c7dc" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 686.453794] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 686.453794] env[61868]: DEBUG nova.compute.manager [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] [instance: 7b89d270-4000-484f-87dd-507335e5c7dc] Skipping network deallocation for instance since networking was not requested. {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 686.550592] env[61868]: INFO nova.scheduler.client.report [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Deleted allocations for instance 7b89d270-4000-484f-87dd-507335e5c7dc [ 686.575768] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ad2d75e-c87c-495c-908b-855e2085f9dc tempest-ServersAdmin275Test-1956128330 tempest-ServersAdmin275Test-1956128330-project-member] Lock "7b89d270-4000-484f-87dd-507335e5c7dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.509s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 686.591456] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 686.643382] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 686.643643] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 686.645239] env[61868]: INFO nova.compute.claims [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.111547] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5781b8f6-1270-4c79-afd6-bcc58f11481b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.119970] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50866f18-aebf-4502-bbc6-2e0b3bb93e90 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.150460] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ad12e4-a218-4f2b-9f88-226fd6dc5915 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.158231] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28b84cb-25f7-4d25-9524-7e5de714c702 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.175820] env[61868]: DEBUG nova.compute.provider_tree [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.184369] env[61868]: DEBUG nova.scheduler.client.report [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 687.205467] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.562s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 687.205946] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 687.247043] env[61868]: DEBUG nova.compute.utils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.248419] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 687.248633] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 687.260844] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 687.299340] env[61868]: DEBUG nova.policy [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1557def5bb42425d88e0edc23ed18275', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70614d5f9aaf40a7be0bbfe6fb9c496e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 687.360818] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 687.386833] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 687.387115] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 687.387308] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.387473] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 687.387637] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.387779] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 687.388201] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 687.388391] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 687.388744] env[61868]: DEBUG nova.virt.hardware [None 
req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 687.388934] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 687.389113] env[61868]: DEBUG nova.virt.hardware [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 687.390284] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d60c95-9fa8-4da2-9cb3-be88ee65e4a5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.399259] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39413ba7-9362-4626-8dd7-937f7bd5c681 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.571673] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Successfully created port: d78e0d1d-229a-4a36-86a2-a2326aa34843 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.120803] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Successfully updated port: d78e0d1d-229a-4a36-86a2-a2326aa34843 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 688.138170] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "refresh_cache-2466fe4e-2589-4417-a63a-4d8bc695109d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 688.138324] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquired lock "refresh_cache-2466fe4e-2589-4417-a63a-4d8bc695109d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 688.138480] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 688.201485] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 
tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 688.411464] env[61868]: DEBUG nova.compute.manager [req-72da7334-ff04-49f8-84a7-a12e09057e8c req-457d1d4a-288f-49f2-948e-e0e0e8f42f88 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Received event network-vif-plugged-d78e0d1d-229a-4a36-86a2-a2326aa34843 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 688.411776] env[61868]: DEBUG oslo_concurrency.lockutils [req-72da7334-ff04-49f8-84a7-a12e09057e8c req-457d1d4a-288f-49f2-948e-e0e0e8f42f88 service nova] Acquiring lock "2466fe4e-2589-4417-a63a-4d8bc695109d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 688.411994] env[61868]: DEBUG oslo_concurrency.lockutils [req-72da7334-ff04-49f8-84a7-a12e09057e8c req-457d1d4a-288f-49f2-948e-e0e0e8f42f88 service nova] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 688.412182] env[61868]: DEBUG oslo_concurrency.lockutils [req-72da7334-ff04-49f8-84a7-a12e09057e8c req-457d1d4a-288f-49f2-948e-e0e0e8f42f88 service nova] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 688.412352] env[61868]: DEBUG nova.compute.manager [req-72da7334-ff04-49f8-84a7-a12e09057e8c req-457d1d4a-288f-49f2-948e-e0e0e8f42f88 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] No waiting events found dispatching network-vif-plugged-d78e0d1d-229a-4a36-86a2-a2326aa34843 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 688.412514] env[61868]: WARNING nova.compute.manager [req-72da7334-ff04-49f8-84a7-a12e09057e8c req-457d1d4a-288f-49f2-948e-e0e0e8f42f88 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Received unexpected event network-vif-plugged-d78e0d1d-229a-4a36-86a2-a2326aa34843 for instance with vm_state building and task_state spawning. 
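[editor's note] The three records above show Nova's external-event latch at work: the Neutron callback delivers network-vif-plugged-d78e0d1d-... while the instance is still spawning, InstanceEvents.pop_instance_event finds no registered waiter under the per-instance "<uuid>-events" lock, and the manager logs "Received unexpected event". Below is a minimal, hypothetical sketch of that prepare/pop pattern -- not Nova's actual implementation (which is eventlet-based and lives in nova/compute/manager.py); the class and method names here are illustrative only.

    import threading
    from collections import defaultdict

    class InstanceEventLatch:
        """Illustrative per-instance registry of named one-shot events."""

        def __init__(self):
            # Stands in for the per-instance "<uuid>-events" lock in the log.
            self._lock = threading.Lock()
            self._events = defaultdict(threading.Event)
            self._prepared = set()

        def prepare(self, name):
            # Called before the operation that expects the event
            # (e.g. before plugging VIFs); returns a waitable latch.
            with self._lock:
                self._prepared.add(name)
                return self._events[name]

        def pop(self, name):
            # Called from the external-event handler. Returns False when
            # no waiter was registered -- the "unexpected event" case.
            with self._lock:
                if name not in self._prepared:
                    return False
                self._prepared.discard(name)
                self._events.pop(name).set()
                return True

    latch = InstanceEventLatch()
    if not latch.pop("network-vif-plugged-d78e0d1d"):
        print("WARNING: received unexpected event")  # mirrors the log line

Since the instance's task_state is still "spawning", nothing has prepared the event yet, so the pop fails and the warning is emitted; the spawn continues regardless, as the subsequent records show.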
[ 688.477373] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Updating instance_info_cache with network_info: [{"id": "d78e0d1d-229a-4a36-86a2-a2326aa34843", "address": "fa:16:3e:c7:15:b5", "network": {"id": "7e11517c-b56a-4339-9227-03ac8179bffb", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-998543034-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "70614d5f9aaf40a7be0bbfe6fb9c496e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd78e0d1d-22", "ovs_interfaceid": "d78e0d1d-229a-4a36-86a2-a2326aa34843", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.494098] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Releasing lock "refresh_cache-2466fe4e-2589-4417-a63a-4d8bc695109d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 688.494540] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Instance network_info: |[{"id": "d78e0d1d-229a-4a36-86a2-a2326aa34843", "address": "fa:16:3e:c7:15:b5", "network": {"id": "7e11517c-b56a-4339-9227-03ac8179bffb", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-998543034-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "70614d5f9aaf40a7be0bbfe6fb9c496e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd78e0d1d-22", "ovs_interfaceid": "d78e0d1d-229a-4a36-86a2-a2326aa34843", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 688.495310] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 
tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:15:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '274afb4c-04df-4213-8ad2-8f48a10d78a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd78e0d1d-229a-4a36-86a2-a2326aa34843', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.503511] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Creating folder: Project (70614d5f9aaf40a7be0bbfe6fb9c496e). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 688.504294] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69f08152-7d09-4358-8127-7344749b7b3f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.516893] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Created folder: Project (70614d5f9aaf40a7be0bbfe6fb9c496e) in parent group-v18181. [ 688.517102] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Creating folder: Instances. Parent ref: group-v18219. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 688.517360] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4acc35d3-36e0-40d5-ad78-6d4f1f4f1e78 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.527296] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Created folder: Instances in parent group-v18219. [ 688.527551] env[61868]: DEBUG oslo.service.loopingcall [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 688.527764] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 688.527956] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ae7f1ea-0a09-4806-bfdf-432ae8e2e425 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.548096] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.548096] env[61868]: value = "task-40954" [ 688.548096] env[61868]: _type = "Task" [ 688.548096] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.556698] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40954, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.058849] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40954, 'name': CreateVM_Task, 'duration_secs': 0.334763} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.059034] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 689.059690] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 689.059884] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 689.062895] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f641985f-2fc6-4e64-836a-52beea8c3efe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.100142] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Reconfiguring VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 689.100508] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-130ffac5-7f5c-44f7-9be6-186b98948724 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.118615] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Waiting for the task: (returnval){ [ 689.118615] env[61868]: value = "task-40955" [ 689.118615] env[61868]: _type = "Task" [ 689.118615] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.128584] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Task: {'id': task-40955, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.629516] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Task: {'id': task-40955, 'name': ReconfigVM_Task, 'duration_secs': 0.122095} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.629957] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Reconfigured VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 689.630182] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.570s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 689.630734] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 689.630950] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 689.631281] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 689.631562] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a274e7de-8e66-4888-8219-5c20eea701f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.637129] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Waiting for the task: (returnval){ [ 689.637129] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5221a8bc-03c2-061e-fac4-2866bfbdd753" [ 689.637129] env[61868]: _type = "Task" [ 689.637129] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.645939] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5221a8bc-03c2-061e-fac4-2866bfbdd753, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.148842] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 690.149251] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.149572] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 690.494720] env[61868]: DEBUG nova.compute.manager [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Received event network-changed-d78e0d1d-229a-4a36-86a2-a2326aa34843 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 690.494982] env[61868]: DEBUG nova.compute.manager [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Refreshing instance network info cache due to event network-changed-d78e0d1d-229a-4a36-86a2-a2326aa34843. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 690.495153] env[61868]: DEBUG oslo_concurrency.lockutils [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] Acquiring lock "refresh_cache-2466fe4e-2589-4417-a63a-4d8bc695109d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 690.495260] env[61868]: DEBUG oslo_concurrency.lockutils [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] Acquired lock "refresh_cache-2466fe4e-2589-4417-a63a-4d8bc695109d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 690.495510] env[61868]: DEBUG nova.network.neutron [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Refreshing network info cache for port d78e0d1d-229a-4a36-86a2-a2326aa34843 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 690.736193] env[61868]: DEBUG nova.network.neutron [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Updated VIF entry in instance network info cache for port d78e0d1d-229a-4a36-86a2-a2326aa34843. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 690.736551] env[61868]: DEBUG nova.network.neutron [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Updating instance_info_cache with network_info: [{"id": "d78e0d1d-229a-4a36-86a2-a2326aa34843", "address": "fa:16:3e:c7:15:b5", "network": {"id": "7e11517c-b56a-4339-9227-03ac8179bffb", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-998543034-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "70614d5f9aaf40a7be0bbfe6fb9c496e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd78e0d1d-22", "ovs_interfaceid": "d78e0d1d-229a-4a36-86a2-a2326aa34843", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.746536] env[61868]: DEBUG oslo_concurrency.lockutils [req-90432274-20ea-461f-a336-92270691200f req-46bd8854-e517-4880-a4a7-77a1a2e6b133 service nova] Releasing lock "refresh_cache-2466fe4e-2589-4417-a63a-4d8bc695109d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 698.935262] env[61868]: DEBUG oslo_concurrency.lockutils [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Acquiring lock "7a2f7e4c-5dde-456d-bf41-c2d0cc507d17" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 698.935560] env[61868]: DEBUG oslo_concurrency.lockutils [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Lock "7a2f7e4c-5dde-456d-bf41-c2d0cc507d17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 701.472586] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.472857] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.473006] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] 
Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 701.473165] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 701.493588] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.493762] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.493881] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494009] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494134] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494256] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494376] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494495] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494613] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494732] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building. 
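Every instance in the heal pass above is skipped for the same reason: its network info cache is not refreshed while it is still building. A self-contained sketch of that selection rule; the Instance class and state constant below are simplified stand-ins for Nova's own objects, not its real API:

    BUILDING = 'building'  # stand-in for nova.compute.vm_states.BUILDING

    class Instance:
        def __init__(self, uuid, vm_state):
            self.uuid = uuid
            self.vm_state = vm_state

    def instances_to_heal(instances):
        # Yield only instances whose info cache should be refreshed.
        for inst in instances:
            if inst.vm_state == BUILDING:
                # Corresponds to the "Skipping network cache update for
                # instance because it is Building." lines above.
                continue
            yield inst

    # With every instance still building, nothing is left to heal,
    # matching "Didn't find any instances for network info cache
    # update." just below.
    assert list(instances_to_heal([Instance('example-uuid', BUILDING)])) == []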
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 701.494848] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 701.495312] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.495481] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.495632] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.495776] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.495919] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 701.505240] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 701.505498] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 701.505632] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 701.505786] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 701.506864] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8da654a-cf8e-4329-b405-ed63452ae0ec {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.516161] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddfaa88-a2c5-46f5-b690-ec97335f4452 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.532394] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e43105b-120a-4147-b956-2ebf83ee160a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.539360] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8454d888-ebc5-4b86-864c-c4429f1bc67f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.570503] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181962MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 701.570681] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 701.570882] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 701.659256] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 163a2904-1b18-4a83-9acf-6c9fe3ad511c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.659433] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b9c8dd4c-8a19-4ffb-8e57-b273c000f121 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.659564] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 05c290e0-e98f-4f66-9e2c-f7d21992bb88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.659688] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c1efc2dd-6474-4fba-a00e-f104f0d446de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.659808] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.659922] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.660054] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.660173] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.660288] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.660399] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 701.671639] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.690007] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.700782] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 24d274d6-04c7-4f4a-941b-31c539054dc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.711609] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.722732] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a19a14cd-b92d-4b3d-a06a-75b59333d1af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.734247] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 46a45c39-ab23-4918-9d7e-84093c2b7ce8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.744947] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ac5d7c83-1d3c-46b8-af48-4ec6c57ad070 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.755637] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 9cb259e7-5102-4610-92ba-f30bbbcdbd9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.766707] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 32ef9425-a629-4284-81d2-0dfa848e4420 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.780028] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7b2e8196-5ec1-4cef-9183-2d4f43639a47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.790134] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a9f648e5-c026-4bcf-a4b4-81cfbc5532a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.801697] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.811962] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 80be1533-08c9-4a44-975b-90ed5ac5402a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.823329] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 727c1245-a258-4b71-93bf-10977d80b3ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.836692] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.846966] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 18918990-c7b2-40b4-9683-ae0635fcc367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.857601] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 34945829-cc74-4bae-9af7-99504a130e2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.867155] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.878899] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8cfd5561-08f1-49b8-b518-73104a987fc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.890003] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f4eb7096-af70-4a46-8e9d-2b94a185afcb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.902327] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
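The per-instance allocations above reconcile exactly with the resource view reported just below: ten actively managed instances, each holding {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, account for used_vcpus=10 and used_disk=10GB, and used_ram is the 512 MB reserved plus 10 * 128 MB = 1792 MB. A short worked check in Python; the numbers are copied from the log, and the capacity helper applies Placement's usual rule of (total - reserved) * allocation_ratio per resource class:

    inventory = {  # from the report-client inventory line below
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
    managed = 10  # instances "actively managed on this compute host"

    used_ram = inventory['MEMORY_MB']['reserved'] + managed * per_instance['MEMORY_MB']
    assert used_ram == 1792  # matches used_ram=1792MB in the final view

    def capacity(inv):
        # Effective schedulable capacity per resource class.
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(capacity(inventory))  # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}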
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.902549] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 701.902615] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 702.290671] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42aabeba-6429-4d4d-aeaf-365c6f43c2c0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.300047] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445c7e79-4412-4da1-85a9-fd702962153a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.329701] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f498784f-7852-431b-b16d-509da4081098 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.337550] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9436a6-3522-45cc-b407-fb4793770b8d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.350935] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.359283] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 702.377775] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 702.377966] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.807s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 703.232931] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.233192] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.234035] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 734.361073] env[61868]: WARNING oslo_vmware.rw_handles [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 734.361073] env[61868]: ERROR oslo_vmware.rw_handles [ 734.361741] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 734.363556] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 734.363840] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Copying Virtual Disk [datastore2] vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] 
vmware_temp/9c53b4b2-554f-4444-aebb-fe3a324d2e8c/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 734.364202] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27c018dc-4444-4f25-a556-e9886a56c0cf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.373170] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 734.373170] env[61868]: value = "task-40956" [ 734.373170] env[61868]: _type = "Task" [ 734.373170] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.381961] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-40956, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.885720] env[61868]: DEBUG oslo_vmware.exceptions [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 734.886241] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 734.886950] env[61868]: ERROR nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 734.886950] env[61868]: Faults: ['InvalidArgument'] [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Traceback (most recent call last): [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] yield resources [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self.driver.spawn(context, instance, image_meta, [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 734.886950] env[61868]: ERROR 
nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self._fetch_image_if_missing(context, vi) [ 734.886950] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] image_cache(vi, tmp_image_ds_loc) [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] vm_util.copy_virtual_disk( [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] session._wait_for_task(vmdk_copy_task) [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] return self.wait_for_task(task_ref) [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] return evt.wait() [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] result = hub.switch() [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 734.887394] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] return self.greenlet.switch() [ 734.887770] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 734.887770] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self.f(*self.args, **self.kw) [ 734.887770] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 734.887770] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] raise exceptions.translate_fault(task_info.error) [ 734.887770] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 734.887770] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Faults: ['InvalidArgument'] [ 734.887770] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] [ 734.888346] env[61868]: INFO nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Terminating instance [ 734.890846] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "refresh_cache-163a2904-1b18-4a83-9acf-6c9fe3ad511c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 734.891147] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "refresh_cache-163a2904-1b18-4a83-9acf-6c9fe3ad511c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 734.891420] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 734.893121] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 734.893445] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.893806] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd61f8fd-a822-4d73-afad-9309eb2bd99a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.904239] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.904696] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Folder [datastore2] devstack-image-cache_base created. 
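The spawn failure in the traceback above surfaces as a VimFaultException whose fault_list carries the vSphere fault name. A hedged sketch of how a caller can branch on that; the failing function here is a stand-in for the CopyVirtualDisk_Task invocation, not Nova's real code path:

    from oslo_vmware import exceptions as vexc

    def copy_virtual_disk():
        # Stand-in for the task that failed in the log.
        raise vexc.VimFaultException(
            ['InvalidArgument'],
            'A specified parameter was not correct: fileType')

    try:
        copy_virtual_disk()
    except vexc.VimFaultException as e:
        if 'InvalidArgument' in e.fault_list:
            # Nova reacts by terminating the instance and aborting the
            # resource claim, as the "Terminating instance" and
            # "Aborting claim" lines show.
            print('spawn failed:', e)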
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 734.907672] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-718b71b8-33e4-47a3-9c12-91c4b1e0f108 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.914169] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Waiting for the task: (returnval){ [ 734.914169] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5234a484-93ba-7c71-b4be-61df941f7e92" [ 734.914169] env[61868]: _type = "Task" [ 734.914169] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.924977] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5234a484-93ba-7c71-b4be-61df941f7e92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.925864] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 734.954977] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.965198] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "refresh_cache-163a2904-1b18-4a83-9acf-6c9fe3ad511c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 734.965745] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 734.965973] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 734.967615] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72601111-e450-4564-9ce4-4ce4f3a3e82f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.977387] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 734.977709] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5538a29f-8e26-4e2b-bbd6-63fa89e1a67f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.017843] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 735.018070] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 735.018247] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Deleting the datastore file [datastore2] 163a2904-1b18-4a83-9acf-6c9fe3ad511c {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 735.018519] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-219b3fcb-e641-4c06-80c6-265818cf8ae1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.025840] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 735.025840] env[61868]: value = "task-40958" [ 735.025840] env[61868]: _type = "Task" [ 735.025840] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.035376] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-40958, 'name': DeleteDatastoreFile_Task} progress is 0%. 
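The "(returnval){ value = "task-40958" ... }" blocks above show oslo.vmware's task-polling pattern: a *_Task method returns a task reference immediately, and the session then polls it (the "progress is 0%" lines) until it completes or raises a translated fault. A minimal sketch, assuming reachable vCenter credentials; the host, user and password are placeholders:

    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',  # placeholders
        api_retry_count=3, task_poll_interval=0.5)

    # Task methods return a managed-object reference for the task
    # rather than blocking; a real caller passes the datacenter moref
    # instead of None.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] example-dir', datacenter=None)

    # Polls until done; on failure this raises the translated fault,
    # which is exactly where the VimFaultException above came from.
    session.wait_for_task(task)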
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.425300] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 735.426281] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Creating directory with path [datastore2] vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.426682] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d2ae6a9-c26e-4bb7-8802-8ebc3cd11ca5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.439191] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Created directory with path [datastore2] vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.440015] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Fetch image to [datastore2] vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 735.440405] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 735.441362] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3809fa-97db-40ee-80ab-4488a433aea9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.449654] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c5ff5e-86b8-49d2-a00b-0eca09b8c12c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.459303] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8668a437-055f-4f1a-ba65-cecb20ffb503 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.493803] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7fe25e8d-1e89-433b-8471-bff6293dc2c4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.500426] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ce0da896-cf4e-47f5-84ab-151a6a6b344d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.522996] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 735.535975] env[61868]: DEBUG oslo_vmware.api [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-40958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033654} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.536170] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.536337] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 735.536507] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 735.536679] env[61868]: INFO nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Took 0.57 seconds to destroy the instance on the hypervisor. [ 735.536936] env[61868]: DEBUG oslo.service.loopingcall [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.538705] env[61868]: DEBUG nova.compute.manager [-] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 735.538816] env[61868]: DEBUG nova.network.neutron [-] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 735.569406] env[61868]: DEBUG nova.network.neutron [-] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 735.576858] env[61868]: DEBUG oslo_vmware.rw_handles [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 735.578789] env[61868]: DEBUG nova.network.neutron [-] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.633100] env[61868]: INFO nova.compute.manager [-] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Took 0.09 seconds to deallocate network for instance. [ 735.636473] env[61868]: DEBUG nova.compute.claims [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 735.636661] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 735.636889] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 735.639896] env[61868]: DEBUG oslo_vmware.rw_handles [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Completed reading data from the image iterator. 
{{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 735.640098] env[61868]: DEBUG oslo_vmware.rw_handles [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 736.097081] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd766358-0ae2-4e8d-ba7c-941a5e6529b6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.105188] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa68cf6-78ea-4a21-b8d0-ff4b1bbc1172 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.137493] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8f9016-e284-48c8-ab93-a08b72af0e7f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.146148] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3637fe-8037-45fe-b509-bedc9eddaa07 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.160393] env[61868]: DEBUG nova.compute.provider_tree [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.171782] env[61868]: DEBUG nova.scheduler.client.report [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 736.189808] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.553s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 736.190425] env[61868]: ERROR nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 
163a2904-1b18-4a83-9acf-6c9fe3ad511c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.190425] env[61868]: Faults: ['InvalidArgument'] [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Traceback (most recent call last): [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self.driver.spawn(context, instance, image_meta, [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self._fetch_image_if_missing(context, vi) [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] image_cache(vi, tmp_image_ds_loc) [ 736.190425] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] vm_util.copy_virtual_disk( [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] session._wait_for_task(vmdk_copy_task) [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] return self.wait_for_task(task_ref) [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] return evt.wait() [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] result = hub.switch() [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 736.190792] env[61868]: ERROR nova.compute.manager 
[instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] return self.greenlet.switch() [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 736.190792] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] self.f(*self.args, **self.kw) [ 736.191146] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 736.191146] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] raise exceptions.translate_fault(task_info.error) [ 736.191146] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.191146] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Faults: ['InvalidArgument'] [ 736.191146] env[61868]: ERROR nova.compute.manager [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] [ 736.191291] env[61868]: DEBUG nova.compute.utils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 736.192769] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Build of instance 163a2904-1b18-4a83-9acf-6c9fe3ad511c was re-scheduled: A specified parameter was not correct: fileType [ 736.192769] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 736.193186] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 736.193416] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "refresh_cache-163a2904-1b18-4a83-9acf-6c9fe3ad511c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 736.193570] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "refresh_cache-163a2904-1b18-4a83-9acf-6c9fe3ad511c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 736.193797] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Building network info cache for instance {{(pid=61868) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2009}} [ 736.221347] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 736.250970] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.260753] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "refresh_cache-163a2904-1b18-4a83-9acf-6c9fe3ad511c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 736.261192] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 736.261502] env[61868]: DEBUG nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 736.261786] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 736.281379] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 736.290279] env[61868]: DEBUG nova.network.neutron [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.298532] env[61868]: INFO nova.compute.manager [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: 163a2904-1b18-4a83-9acf-6c9fe3ad511c] Took 0.04 seconds to deallocate network for instance. 
[ 736.398420] env[61868]: INFO nova.scheduler.client.report [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Deleted allocations for instance 163a2904-1b18-4a83-9acf-6c9fe3ad511c [ 736.418995] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4a458284-c004-49e3-8651-06fe1dbb1527 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "163a2904-1b18-4a83-9acf-6c9fe3ad511c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 148.814s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 736.441058] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 736.496901] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 736.497159] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 736.498794] env[61868]: INFO nova.compute.claims [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.950147] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9ea5e2-6d03-4083-9079-3d3d99ec647c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.957881] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee22c7f-689b-4114-ba78-44dc631cb159 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.992500] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265282d4-3e63-4472-b4d1-68a69b15aeca {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.000797] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a88ebc-3a8d-4d50-a275-dba6223e3ce3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.014399] env[61868]: DEBUG nova.compute.provider_tree [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] 
Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.023429] env[61868]: DEBUG nova.scheduler.client.report [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 737.041216] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.544s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 737.052807] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "ff9f2002-f535-4ebb-970a-3889aab3ec3d" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 737.053040] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "ff9f2002-f535-4ebb-970a-3889aab3ec3d" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 737.060674] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "ff9f2002-f535-4ebb-970a-3889aab3ec3d" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.007s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 737.061514] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 737.102031] env[61868]: DEBUG nova.compute.utils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.103308] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 737.103474] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 737.114011] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 737.153126] env[61868]: DEBUG nova.policy [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7dca4213a984df4af36fb07898be0e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0555b04752b6461fa88bb82f89bbb753', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 737.188381] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 737.211343] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.211705] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.211935] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.212199] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.212403] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.212615] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.213072] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.213305] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.213536] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f 
tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.213865] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.215491] env[61868]: DEBUG nova.virt.hardware [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.216375] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd1dc8a-c36d-4c64-948d-a9705e5644e2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.231375] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b2bf83-9794-4076-a092-099c62a3ef34 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.421148] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Successfully created port: 62fdd978-7275-4c69-bc24-dd1fda84275a {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.002383] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 738.002671] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 738.008497] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Successfully updated port: 62fdd978-7275-4c69-bc24-dd1fda84275a {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.020577] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "refresh_cache-1df194bf-fa9b-4d03-9b20-8478147de566" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 
738.020761] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquired lock "refresh_cache-1df194bf-fa9b-4d03-9b20-8478147de566" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 738.020920] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 738.059741] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 738.141145] env[61868]: DEBUG nova.compute.manager [req-31610b39-c43e-42c2-a0bd-030080be914a req-855749b3-483a-40c9-b87d-9d5faf6f32de service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Received event network-vif-plugged-62fdd978-7275-4c69-bc24-dd1fda84275a {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 738.141488] env[61868]: DEBUG oslo_concurrency.lockutils [req-31610b39-c43e-42c2-a0bd-030080be914a req-855749b3-483a-40c9-b87d-9d5faf6f32de service nova] Acquiring lock "1df194bf-fa9b-4d03-9b20-8478147de566-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 738.141612] env[61868]: DEBUG oslo_concurrency.lockutils [req-31610b39-c43e-42c2-a0bd-030080be914a req-855749b3-483a-40c9-b87d-9d5faf6f32de service nova] Lock "1df194bf-fa9b-4d03-9b20-8478147de566-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 738.141741] env[61868]: DEBUG oslo_concurrency.lockutils [req-31610b39-c43e-42c2-a0bd-030080be914a req-855749b3-483a-40c9-b87d-9d5faf6f32de service nova] Lock "1df194bf-fa9b-4d03-9b20-8478147de566-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 738.141904] env[61868]: DEBUG nova.compute.manager [req-31610b39-c43e-42c2-a0bd-030080be914a req-855749b3-483a-40c9-b87d-9d5faf6f32de service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] No waiting events found dispatching network-vif-plugged-62fdd978-7275-4c69-bc24-dd1fda84275a {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 738.142069] env[61868]: WARNING nova.compute.manager [req-31610b39-c43e-42c2-a0bd-030080be914a req-855749b3-483a-40c9-b87d-9d5faf6f32de service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Received unexpected event network-vif-plugged-62fdd978-7275-4c69-bc24-dd1fda84275a for instance with vm_state building and task_state spawning. 
[ 738.244651] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Updating instance_info_cache with network_info: [{"id": "62fdd978-7275-4c69-bc24-dd1fda84275a", "address": "fa:16:3e:e0:87:55", "network": {"id": "2f2de643-6f4f-4d17-ac19-f7b50fe34275", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-260532300-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "0555b04752b6461fa88bb82f89bbb753", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62fdd978-72", "ovs_interfaceid": "62fdd978-7275-4c69-bc24-dd1fda84275a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.263785] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Releasing lock "refresh_cache-1df194bf-fa9b-4d03-9b20-8478147de566" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 738.264109] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Instance network_info: |[{"id": "62fdd978-7275-4c69-bc24-dd1fda84275a", "address": "fa:16:3e:e0:87:55", "network": {"id": "2f2de643-6f4f-4d17-ac19-f7b50fe34275", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-260532300-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "0555b04752b6461fa88bb82f89bbb753", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62fdd978-72", "ovs_interfaceid": "62fdd978-7275-4c69-bc24-dd1fda84275a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 738.264526] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 
tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:87:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62fdd978-7275-4c69-bc24-dd1fda84275a', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 738.272709] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Creating folder: Project (0555b04752b6461fa88bb82f89bbb753). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 738.273407] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6698ae62-25c0-423c-a715-3cf9e0d02537 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.289332] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Created folder: Project (0555b04752b6461fa88bb82f89bbb753) in parent group-v18181. [ 738.289780] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Creating folder: Instances. Parent ref: group-v18222. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 738.289991] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5006e17-d4c3-4b4d-a9b4-2df6f67cfbd8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.299517] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Created folder: Instances in parent group-v18222. [ 738.299763] env[61868]: DEBUG oslo.service.loopingcall [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.299960] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 738.300183] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-397f1e57-394b-4305-bc7d-bb7b13614408 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.320815] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 738.320815] env[61868]: value = "task-40961" [ 738.320815] env[61868]: _type = "Task" [ 738.320815] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.329037] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40961, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.830924] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40961, 'name': CreateVM_Task, 'duration_secs': 0.30897} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.831191] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 738.831890] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 738.832242] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 738.835114] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4601fe35-ba49-482d-9512-e6c1fb3c68e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.868753] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Reconfiguring VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 738.869137] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baca1d7b-ca46-4a76-aa7e-4d3610bd0c59 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.885831] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Waiting for the task: (returnval){ [ 738.885831] env[61868]: value = "task-40962" [ 738.885831] env[61868]: _type = "Task" [ 738.885831] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.894329] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Task: {'id': task-40962, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.396493] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Task: {'id': task-40962, 'name': ReconfigVM_Task, 'duration_secs': 0.114753} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.396783] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Reconfigured VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 739.397056] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 739.397316] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 739.397459] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 739.397781] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 739.398042] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d5324ed-a031-4f46-adae-9a85cb0b835c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.402704] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Waiting for the task: (returnval){ [ 739.402704] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52bfd9ed-f7f7-b997-82a2-9f4eb990218a" [ 739.402704] env[61868]: _type = "Task" [ 739.402704] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.410466] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52bfd9ed-f7f7-b997-82a2-9f4eb990218a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.913679] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 739.913952] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.914166] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 740.379991] env[61868]: DEBUG nova.compute.manager [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Received event network-changed-62fdd978-7275-4c69-bc24-dd1fda84275a {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 740.380251] env[61868]: DEBUG nova.compute.manager [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Refreshing instance network info cache due to event network-changed-62fdd978-7275-4c69-bc24-dd1fda84275a. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 740.380464] env[61868]: DEBUG oslo_concurrency.lockutils [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] Acquiring lock "refresh_cache-1df194bf-fa9b-4d03-9b20-8478147de566" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 740.380638] env[61868]: DEBUG oslo_concurrency.lockutils [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] Acquired lock "refresh_cache-1df194bf-fa9b-4d03-9b20-8478147de566" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 740.381065] env[61868]: DEBUG nova.network.neutron [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Refreshing network info cache for port 62fdd978-7275-4c69-bc24-dd1fda84275a {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 740.629413] env[61868]: DEBUG nova.network.neutron [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Updated VIF entry in instance network info cache for port 62fdd978-7275-4c69-bc24-dd1fda84275a. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 740.629759] env[61868]: DEBUG nova.network.neutron [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Updating instance_info_cache with network_info: [{"id": "62fdd978-7275-4c69-bc24-dd1fda84275a", "address": "fa:16:3e:e0:87:55", "network": {"id": "2f2de643-6f4f-4d17-ac19-f7b50fe34275", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-260532300-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "0555b04752b6461fa88bb82f89bbb753", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62fdd978-72", "ovs_interfaceid": "62fdd978-7275-4c69-bc24-dd1fda84275a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.638802] env[61868]: DEBUG oslo_concurrency.lockutils [req-172a3e97-721b-4c57-92a4-c2918b36c39b req-5fe05536-a4ac-4ae8-af90-f65dfad3c04d service nova] Releasing lock "refresh_cache-1df194bf-fa9b-4d03-9b20-8478147de566" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 758.348642] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.352217] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.352522] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.346824] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.350576] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.352901] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.352901] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 763.352901] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 763.375395] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.375570] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.375683] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.375806] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.375928] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.376067] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.376190] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.376304] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.376417] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.376531] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 763.376644] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 763.377301] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.377444] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 763.389746] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 763.389986] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 763.390149] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 763.390336] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 763.391536] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07faade-a8ec-44cd-865e-fd32079fdd7c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.401342] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1f7698-7a3f-4c04-91fc-4addd625334d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.416580] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b470b94-799c-4da0-9f8c-9061f8e262e5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.424852] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-05a6f51a-0791-4e8a-9a33-1515eaa19683 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.459393] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181959MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 763.459568] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 763.459780] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 763.536937] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b9c8dd4c-8a19-4ffb-8e57-b273c000f121 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.537295] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 05c290e0-e98f-4f66-9e2c-f7d21992bb88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.537540] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c1efc2dd-6474-4fba-a00e-f104f0d446de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.537771] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.537991] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.538210] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.538446] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.538671] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.538892] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.539109] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.550869] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.563531] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 24d274d6-04c7-4f4a-941b-31c539054dc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.575296] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.587891] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a19a14cd-b92d-4b3d-a06a-75b59333d1af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.600400] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 46a45c39-ab23-4918-9d7e-84093c2b7ce8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.611783] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ac5d7c83-1d3c-46b8-af48-4ec6c57ad070 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.623590] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 9cb259e7-5102-4610-92ba-f30bbbcdbd9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.636048] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 32ef9425-a629-4284-81d2-0dfa848e4420 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.651137] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7b2e8196-5ec1-4cef-9183-2d4f43639a47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.663880] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a9f648e5-c026-4bcf-a4b4-81cfbc5532a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.675179] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.685337] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 80be1533-08c9-4a44-975b-90ed5ac5402a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.695900] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 727c1245-a258-4b71-93bf-10977d80b3ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.710664] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.719184] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 18918990-c7b2-40b4-9683-ae0635fcc367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.735562] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 34945829-cc74-4bae-9af7-99504a130e2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.746601] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.756745] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8cfd5561-08f1-49b8-b518-73104a987fc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.766818] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f4eb7096-af70-4a46-8e9d-2b94a185afcb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.777137] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.786866] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.787101] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 763.787249] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 764.204664] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05724e4-5962-406f-9611-677a592236da {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.212760] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c6e10e-c34f-4473-b6af-ce593c505a6b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.243217] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9158236-02fd-4faf-9136-ce4c00b4e42e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.251224] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce720191-b198-4ad5-9669-1b10f7de4f84 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.265962] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None 
None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.274248] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 764.291914] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 764.292140] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.832s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 765.266822] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.267162] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.267319] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 780.703117] env[61868]: WARNING oslo_vmware.rw_handles [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 780.703117] env[61868]: ERROR oslo_vmware.rw_handles [ 780.703753] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 780.705167] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 780.705413] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Copying Virtual Disk [datastore2] vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/9d5e063a-eb82-4f16-b3e8-ff08eba0b564/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 780.705694] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36a92f0f-5a79-4b77-8bf8-0cc188eaf25b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.717426] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 
tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Waiting for the task: (returnval){ [ 780.717426] env[61868]: value = "task-40963" [ 780.717426] env[61868]: _type = "Task" [ 780.717426] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.726751] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Task: {'id': task-40963, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.228648] env[61868]: DEBUG oslo_vmware.exceptions [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 781.228953] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 781.229500] env[61868]: ERROR nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.229500] env[61868]: Faults: ['InvalidArgument'] [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Traceback (most recent call last): [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] yield resources [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self.driver.spawn(context, instance, image_meta, [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self._fetch_image_if_missing(context, vi) [ 781.229500] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
639, in _fetch_image_if_missing [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] image_cache(vi, tmp_image_ds_loc) [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] vm_util.copy_virtual_disk( [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] session._wait_for_task(vmdk_copy_task) [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] return self.wait_for_task(task_ref) [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] return evt.wait() [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] result = hub.switch() [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 781.229904] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] return self.greenlet.switch() [ 781.230298] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 781.230298] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self.f(*self.args, **self.kw) [ 781.230298] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 781.230298] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] raise exceptions.translate_fault(task_info.error) [ 781.230298] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.230298] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Faults: ['InvalidArgument'] [ 781.230298] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] [ 781.230298] env[61868]: INFO nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Terminating instance [ 781.232216] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "refresh_cache-b9c8dd4c-8a19-4ffb-8e57-b273c000f121" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 781.232376] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquired lock "refresh_cache-b9c8dd4c-8a19-4ffb-8e57-b273c000f121" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 781.232545] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 781.234060] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 781.234368] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.234621] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60839899-4ee8-405e-987d-73d82839e123 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.243783] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.243982] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 781.244959] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72a68260-7daf-4667-9a55-2dc5d736f3d1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.252409] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for the task: (returnval){ [ 781.252409] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f7a93b-2807-ce5d-a51b-1e9e81328ba4" [ 781.252409] env[61868]: _type = "Task" [ 781.252409] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.262211] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f7a93b-2807-ce5d-a51b-1e9e81328ba4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.263687] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 781.293378] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.302699] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Releasing lock "refresh_cache-b9c8dd4c-8a19-4ffb-8e57-b273c000f121" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 781.303256] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 781.303590] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 781.304752] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256f8c6b-c742-4162-970b-ca05645d9149 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.313259] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 781.313657] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc3ebe3a-6f02-4383-a9a8-df9b9d44c563 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.350362] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 781.350976] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 781.351285] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Deleting the datastore file [datastore2] b9c8dd4c-8a19-4ffb-8e57-b273c000f121 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.351689] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0636536-992a-4bea-b45f-f35a3c12ff7d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.359220] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Waiting for the task: (returnval){ [ 781.359220] env[61868]: value = "task-40965" [ 781.359220] env[61868]: _type = "Task" [ 781.359220] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.370260] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Task: {'id': task-40965, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.763308] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 781.763703] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Creating directory with path [datastore2] vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.763703] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4701a4aa-2b16-4dde-8aae-2088f9cf827a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.775171] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Created directory with path [datastore2] vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.775378] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Fetch image to [datastore2] vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 781.775553] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 781.776461] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024c1b15-4876-49b6-ac7b-9b0abfe0e509 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.783608] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32951747-e039-47f9-b4e4-1056aa12941b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.792902] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdefc9f5-1d3d-4985-b588-719d57409b4f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.823471] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dfcb5566-51ab-402e-b44a-f762e7f1c4cd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.830211] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-022e2be0-8cf1-4b47-93ff-96034c864518 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.851155] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 781.869658] env[61868]: DEBUG oslo_vmware.api [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Task: {'id': task-40965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045958} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.869760] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.869942] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 781.870118] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 781.870294] env[61868]: INFO nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Took 0.57 seconds to destroy the instance on the hypervisor. [ 781.870531] env[61868]: DEBUG oslo.service.loopingcall [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.870749] env[61868]: DEBUG nova.compute.manager [-] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 781.870852] env[61868]: DEBUG nova.network.neutron [-] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 781.891685] env[61868]: DEBUG nova.network.neutron [-] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 781.899514] env[61868]: DEBUG nova.network.neutron [-] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.902007] env[61868]: DEBUG oslo_vmware.rw_handles [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 781.954786] env[61868]: INFO nova.compute.manager [-] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Took 0.08 seconds to deallocate network for instance. [ 781.956967] env[61868]: DEBUG nova.compute.claims [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 781.957137] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 781.957352] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 781.962507] env[61868]: DEBUG oslo_vmware.rw_handles [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Completed reading data from the image iterator. 
{{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 781.962670] env[61868]: DEBUG oslo_vmware.rw_handles [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 782.392807] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff7a784-42ff-4035-bd98-7e2727918485 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.400810] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba739d6c-cdf7-45c3-9798-0b7a6c3f6646 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.431333] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fca2c2b-245c-43d4-a00f-fb054548d014 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.439373] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f5d76a-320f-454b-a193-a1b5456924bf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.453048] env[61868]: DEBUG nova.compute.provider_tree [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.462055] env[61868]: DEBUG nova.scheduler.client.report [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 782.479250] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.522s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 782.480042] env[61868]: ERROR nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: 
b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.480042] env[61868]: Faults: ['InvalidArgument'] [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Traceback (most recent call last): [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self.driver.spawn(context, instance, image_meta, [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self._fetch_image_if_missing(context, vi) [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] image_cache(vi, tmp_image_ds_loc) [ 782.480042] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] vm_util.copy_virtual_disk( [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] session._wait_for_task(vmdk_copy_task) [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] return self.wait_for_task(task_ref) [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] return evt.wait() [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] result = hub.switch() [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 782.480439] env[61868]: ERROR nova.compute.manager 
[instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] return self.greenlet.switch() [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 782.480439] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] self.f(*self.args, **self.kw) [ 782.480832] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 782.480832] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] raise exceptions.translate_fault(task_info.error) [ 782.480832] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.480832] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Faults: ['InvalidArgument'] [ 782.480832] env[61868]: ERROR nova.compute.manager [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] [ 782.481497] env[61868]: DEBUG nova.compute.utils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.483105] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Build of instance b9c8dd4c-8a19-4ffb-8e57-b273c000f121 was re-scheduled: A specified parameter was not correct: fileType [ 782.483105] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 782.483485] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 782.483712] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquiring lock "refresh_cache-b9c8dd4c-8a19-4ffb-8e57-b273c000f121" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 782.483858] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Acquired lock "refresh_cache-b9c8dd4c-8a19-4ffb-8e57-b273c000f121" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 782.484030] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Building network info cache for instance 
{{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 782.513230] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 782.538157] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.547684] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Releasing lock "refresh_cache-b9c8dd4c-8a19-4ffb-8e57-b273c000f121" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 782.547918] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 782.548102] env[61868]: DEBUG nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 782.548273] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 782.566881] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 782.574417] env[61868]: DEBUG nova.network.neutron [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.582661] env[61868]: INFO nova.compute.manager [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] [instance: b9c8dd4c-8a19-4ffb-8e57-b273c000f121] Took 0.03 seconds to deallocate network for instance. 
[ 782.678152] env[61868]: INFO nova.scheduler.client.report [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Deleted allocations for instance b9c8dd4c-8a19-4ffb-8e57-b273c000f121 [ 782.704508] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1c56de7b-65d6-4e51-b3bb-0c648af2d2ab tempest-ServersAdminNegativeTestJSON-1766368982 tempest-ServersAdminNegativeTestJSON-1766368982-project-member] Lock "b9c8dd4c-8a19-4ffb-8e57-b273c000f121" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 194.051s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 782.724390] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 782.786260] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 782.786542] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 782.788196] env[61868]: INFO nova.compute.claims [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.222276] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c3d5d8-d304-4afb-8ad5-97c905f2bfc5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.230129] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0f64e0-9c6c-4469-a260-b690d64c0430 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.259570] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f3f6be-ad2f-46f1-a73f-1caf22319e08 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.267407] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acaa5e46-c2b7-4b95-af71-0b8871308507 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.281750] env[61868]: DEBUG nova.compute.provider_tree [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 
tempest-InstanceActionsTestJSON-555491356-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.290621] env[61868]: DEBUG nova.scheduler.client.report [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 783.307493] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.521s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 783.307968] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 783.355358] env[61868]: DEBUG nova.compute.utils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 783.356858] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 783.357027] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 783.369575] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 783.406377] env[61868]: DEBUG nova.policy [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5709d010ab084542955afb74abbb13ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b30744d62b09496aa99f3c1a24e9fd8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 783.454071] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 783.476731] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 783.476731] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 783.477142] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.477142] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 783.477142] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.477646] env[61868]: DEBUG nova.virt.hardware [None 
req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 783.477646] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 783.477821] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 783.478144] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 783.478333] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 783.478553] env[61868]: DEBUG nova.virt.hardware [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 783.479474] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72573be-b058-454a-ad45-0a40e410fa3c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.488042] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371929de-541c-4977-96c9-d0c4782fa60e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.690400] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Successfully created port: ef53aedf-e22e-47ae-9876-63e447109deb {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.399995] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Successfully updated port: ef53aedf-e22e-47ae-9876-63e447109deb {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.411230] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 
tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "refresh_cache-4cec72dc-99c1-4cf9-b391-a909bab7fb23" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 784.411412] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquired lock "refresh_cache-4cec72dc-99c1-4cf9-b391-a909bab7fb23" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 784.411412] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 784.465912] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 784.486920] env[61868]: DEBUG nova.compute.manager [req-105fd448-066b-4101-9d57-b833b4b14986 req-0d5468a0-3348-4579-b87c-b5c79c5cbaf8 service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Received event network-vif-plugged-ef53aedf-e22e-47ae-9876-63e447109deb {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 784.487149] env[61868]: DEBUG oslo_concurrency.lockutils [req-105fd448-066b-4101-9d57-b833b4b14986 req-0d5468a0-3348-4579-b87c-b5c79c5cbaf8 service nova] Acquiring lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 784.487357] env[61868]: DEBUG oslo_concurrency.lockutils [req-105fd448-066b-4101-9d57-b833b4b14986 req-0d5468a0-3348-4579-b87c-b5c79c5cbaf8 service nova] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 784.487521] env[61868]: DEBUG oslo_concurrency.lockutils [req-105fd448-066b-4101-9d57-b833b4b14986 req-0d5468a0-3348-4579-b87c-b5c79c5cbaf8 service nova] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 784.487688] env[61868]: DEBUG nova.compute.manager [req-105fd448-066b-4101-9d57-b833b4b14986 req-0d5468a0-3348-4579-b87c-b5c79c5cbaf8 service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] No waiting events found dispatching network-vif-plugged-ef53aedf-e22e-47ae-9876-63e447109deb {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 784.487852] env[61868]: WARNING nova.compute.manager [req-105fd448-066b-4101-9d57-b833b4b14986 req-0d5468a0-3348-4579-b87c-b5c79c5cbaf8 service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Received unexpected event 
network-vif-plugged-ef53aedf-e22e-47ae-9876-63e447109deb for instance with vm_state building and task_state spawning. [ 784.660255] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Updating instance_info_cache with network_info: [{"id": "ef53aedf-e22e-47ae-9876-63e447109deb", "address": "fa:16:3e:04:67:d7", "network": {"id": "1e064381-a2fc-40f8-a4ae-0ec9cc9f1d27", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1910286341-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "b30744d62b09496aa99f3c1a24e9fd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef53aedf-e2", "ovs_interfaceid": "ef53aedf-e22e-47ae-9876-63e447109deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.676376] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Releasing lock "refresh_cache-4cec72dc-99c1-4cf9-b391-a909bab7fb23" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 784.676681] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Instance network_info: |[{"id": "ef53aedf-e22e-47ae-9876-63e447109deb", "address": "fa:16:3e:04:67:d7", "network": {"id": "1e064381-a2fc-40f8-a4ae-0ec9cc9f1d27", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1910286341-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "b30744d62b09496aa99f3c1a24e9fd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef53aedf-e2", "ovs_interfaceid": "ef53aedf-e22e-47ae-9876-63e447109deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 784.677642] 
env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:67:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9bb629cd-6d0f-4bed-965c-bd04a2f3ec49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef53aedf-e22e-47ae-9876-63e447109deb', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.684695] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Creating folder: Project (b30744d62b09496aa99f3c1a24e9fd8b). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 784.685362] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dee41ddb-8243-4146-87ee-b16e74a3e06b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.699615] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Created folder: Project (b30744d62b09496aa99f3c1a24e9fd8b) in parent group-v18181. [ 784.699937] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Creating folder: Instances. Parent ref: group-v18225. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 784.700289] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e6a98bb-c04e-46c0-b3bf-6fa05f8b2681 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.710723] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Created folder: Instances in parent group-v18225. [ 784.710997] env[61868]: DEBUG oslo.service.loopingcall [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.711201] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 784.711408] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf825b35-933a-43ac-9b7c-81c0284c3b3f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.732391] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.732391] env[61868]: value = "task-40968" [ 784.732391] env[61868]: _type = "Task" [ 784.732391] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.741126] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40968, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.244905] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40968, 'name': CreateVM_Task, 'duration_secs': 0.322738} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.245150] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 785.245769] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 785.246010] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 785.249983] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a2819f-956f-48ef-8a93-ce8e33c2f837 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.287426] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Reconfiguring VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 785.288168] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45346ce5-9880-42bc-96a9-cc2e82c81ba2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.306024] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Waiting for the task: (returnval){ [ 785.306024] env[61868]: value = "task-40969" [ 785.306024] env[61868]: _type = "Task" [ 785.306024] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.316168] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Task: {'id': task-40969, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.816253] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Task: {'id': task-40969, 'name': ReconfigVM_Task, 'duration_secs': 0.113133} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.816588] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Reconfigured VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 785.816733] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.571s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 785.816975] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 785.817113] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 785.817457] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 785.817711] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71b020ef-01e3-4674-b319-cd54dd1179b3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.823009] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Waiting for the task: (returnval){ [ 785.823009] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52c0a85f-2d6e-2288-f00f-ea2f89fdcee0" [ 785.823009] env[61868]: _type = "Task" [ 785.823009] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.831346] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52c0a85f-2d6e-2288-f00f-ea2f89fdcee0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.334236] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 786.334666] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.334963] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 786.835279] env[61868]: DEBUG nova.compute.manager [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Received event network-changed-ef53aedf-e22e-47ae-9876-63e447109deb {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 786.835279] env[61868]: DEBUG nova.compute.manager [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Refreshing instance network info cache due to event network-changed-ef53aedf-e22e-47ae-9876-63e447109deb. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 786.835543] env[61868]: DEBUG oslo_concurrency.lockutils [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] Acquiring lock "refresh_cache-4cec72dc-99c1-4cf9-b391-a909bab7fb23" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 786.835609] env[61868]: DEBUG oslo_concurrency.lockutils [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] Acquired lock "refresh_cache-4cec72dc-99c1-4cf9-b391-a909bab7fb23" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 786.835731] env[61868]: DEBUG nova.network.neutron [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Refreshing network info cache for port ef53aedf-e22e-47ae-9876-63e447109deb {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 787.070473] env[61868]: DEBUG nova.network.neutron [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Updated VIF entry in instance network info cache for port ef53aedf-e22e-47ae-9876-63e447109deb. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 787.070910] env[61868]: DEBUG nova.network.neutron [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Updating instance_info_cache with network_info: [{"id": "ef53aedf-e22e-47ae-9876-63e447109deb", "address": "fa:16:3e:04:67:d7", "network": {"id": "1e064381-a2fc-40f8-a4ae-0ec9cc9f1d27", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1910286341-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "b30744d62b09496aa99f3c1a24e9fd8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef53aedf-e2", "ovs_interfaceid": "ef53aedf-e22e-47ae-9876-63e447109deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.080743] env[61868]: DEBUG oslo_concurrency.lockutils [req-b87bfb9d-a79b-416d-be75-58a543048386 req-f14781a2-aad9-43d8-b4be-aba0a7c37b0d service nova] Releasing lock "refresh_cache-4cec72dc-99c1-4cf9-b391-a909bab7fb23" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 787.621008] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock 
"05c290e0-e98f-4f66-9e2c-f7d21992bb88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 787.914021] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "c1efc2dd-6474-4fba-a00e-f104f0d446de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 788.855269] env[61868]: DEBUG oslo_concurrency.lockutils [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 798.525734] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 798.740268] env[61868]: DEBUG oslo_concurrency.lockutils [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "4cfa680a-0ea3-4c40-b89c-b6067397427a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 801.498921] env[61868]: DEBUG oslo_concurrency.lockutils [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 805.951570] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 808.039962] env[61868]: DEBUG oslo_concurrency.lockutils [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "2466fe4e-2589-4417-a63a-4d8bc695109d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 809.518641] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 
tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "1df194bf-fa9b-4d03-9b20-8478147de566" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 810.552528] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 815.872640] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 815.872942] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 818.357214] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.357214] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 818.366645] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] There are 0 instances to clean {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 818.366865] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.366995] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61868) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 818.384527] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.392496] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.392496] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.347399] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.351129] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.370185] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 823.370483] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 823.370778] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 823.370878] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 823.372092] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858d24dd-e516-4e1d-acde-86ca1ec48707 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.380983] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87b3be0-dc40-40ee-8c80-503496a83262 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.403415] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696364c1-7681-4b0b-93a7-4738486fcf42 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.420025] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ca3ffa-c429-4f9b-9318-7815672a49c3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.457667] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181907MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 823.457840] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 823.458043] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 823.614297] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 05c290e0-e98f-4f66-9e2c-f7d21992bb88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.614513] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c1efc2dd-6474-4fba-a00e-f104f0d446de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.614656] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.614792] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.614913] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.615039] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.615157] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.615270] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.615410] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.615520] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 823.633143] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7b2e8196-5ec1-4cef-9183-2d4f43639a47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.646171] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a9f648e5-c026-4bcf-a4b4-81cfbc5532a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.659439] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.680094] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 80be1533-08c9-4a44-975b-90ed5ac5402a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.699950] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 727c1245-a258-4b71-93bf-10977d80b3ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.710221] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.723376] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 18918990-c7b2-40b4-9683-ae0635fcc367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.739113] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 34945829-cc74-4bae-9af7-99504a130e2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.756507] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.769472] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8cfd5561-08f1-49b8-b518-73104a987fc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.786684] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f4eb7096-af70-4a46-8e9d-2b94a185afcb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.797854] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.810238] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.825797] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 823.826074] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 823.826224] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 823.845345] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing inventories for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 823.863534] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Updating ProviderTree inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 823.863810] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.880358] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing aggregate associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, aggregates: None {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 823.899511] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing trait associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 824.384148] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b9ee7e-86bb-4787-91bb-c8fc848e7917 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.395222] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ede041b-4776-4af7-9443-347bd8e44cfc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.437828] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af8354f-adab-494d-8cb1-0c9d63725c27 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.446434] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a986cf-96fe-4298-a7ad-64e0f7b962bf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.463218] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.493430] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 824.513555] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
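The inventory payload logged above carries everything placement needs to derive effective capacity per resource class: capacity = (total - reserved) * allocation_ratio, while min_unit/max_unit/step_size only constrain the size of individual allocations. A minimal sketch reproducing that arithmetic from the logged values (plain Python for illustration, not Nova code; the helper name is made up):

# Reproduce placement-style effective capacity from the logged inventory.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # capacity = (total - reserved) * allocation_ratio
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

This squares with the audit above: 10 of 48 vCPUs are allocated against a 4.0 allocation ratio, so the node is nowhere near its 192-VCPU effective limit.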
[ 824.513763] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.056s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 825.514779] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.514779] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 825.515011] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 825.541826] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.542012] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.542145] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.542533] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.542599] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.542715] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.542905] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.542951] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building.
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.543063] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.543179] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 825.543294] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 825.543879] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.543987] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 825.544160] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 826.351728] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 826.352148] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}}
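The run of "Running periodic task ComputeManager._*" entries above comes from oslo.service's periodic task machinery: decorated methods are dispatched by run_periodic_tasks(), which logs each dispatch, and _reclaim_queued_deletes then returns immediately because CONF.reclaim_instance_interval <= 0. A minimal sketch of that pattern under standard oslo.service usage (the manager class and option registration here are illustrative, not Nova's actual code):

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        # Mirrors the guard visible in the log: a non-positive interval
        # turns the task into a no-op.
        if CONF.reclaim_instance_interval <= 0:
            return
        # ... reclaim soft-deleted instances here ...

# A timer in the service loop invokes manager.run_periodic_tasks(context),
# which emits one "Running periodic task ..." DEBUG line per dispatched task.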
[ 829.400161] env[61868]: WARNING oslo_vmware.rw_handles [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 829.400161] env[61868]: ERROR oslo_vmware.rw_handles [ 829.400161] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 829.404941] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 829.404941] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Copying Virtual Disk [datastore2] vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/525001aa-1060-4a76-9ead-91a1b3c81509/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 829.404941] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1fd75949-7639-4b62-a873-0815830867bf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.408046] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for the
task: (returnval){ [ 829.408046] env[61868]: value = "task-40975" [ 829.408046] env[61868]: _type = "Task" [ 829.408046] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.420830] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Task: {'id': task-40975, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.472988] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "26f77431-9a5d-444d-b345-10108c34b59b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 829.473432] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "26f77431-9a5d-444d-b345-10108c34b59b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 829.919446] env[61868]: DEBUG oslo_vmware.exceptions [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 829.919728] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 829.920330] env[61868]: ERROR nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 829.920330] env[61868]: Faults: ['InvalidArgument'] [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Traceback (most recent call last): [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] yield resources [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self.driver.spawn(context, instance, image_meta, [ 829.920330] 
env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self._vmops.spawn(context, instance, image_meta, injected_files, [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self._fetch_image_if_missing(context, vi) [ 829.920330] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] image_cache(vi, tmp_image_ds_loc) [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] vm_util.copy_virtual_disk( [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] session._wait_for_task(vmdk_copy_task) [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] return self.wait_for_task(task_ref) [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] return evt.wait() [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] result = hub.switch() [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 829.920783] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] return self.greenlet.switch() [ 829.921189] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 829.921189] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self.f(*self.args, **self.kw) [ 829.921189] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 829.921189] env[61868]: ERROR nova.compute.manager [instance: 
05c290e0-e98f-4f66-9e2c-f7d21992bb88] raise exceptions.translate_fault(task_info.error) [ 829.921189] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 829.921189] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Faults: ['InvalidArgument'] [ 829.921189] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] [ 829.921189] env[61868]: INFO nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Terminating instance [ 829.922406] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 829.922489] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.922991] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 829.923141] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquired lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 829.923329] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 829.924747] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7467c2d7-eccf-41e7-af83-debf7bc3c1e5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.936300] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.936491] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 829.950329] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3d095be-88a2-436e-91bf-7eb280b41dc0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.959852] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for the task: (returnval){ [ 829.959852] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f9364b-bb86-d569-f229-24961ced3e88" [ 829.959852] env[61868]: _type = "Task" [ 829.959852] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.968932] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f9364b-bb86-d569-f229-24961ced3e88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.970493] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 830.003839] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.022569] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Releasing lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 830.023011] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 830.023219] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 830.024325] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7350e7dd-4da9-4620-9868-ba438cf7e40c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.041038] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 830.041254] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d7cacaa-7821-49ac-a27f-ca7ec7e91cd8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.074887] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 830.075188] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 830.075410] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Deleting the datastore file [datastore2] 05c290e0-e98f-4f66-9e2c-f7d21992bb88 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.075761] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8180f3dd-a3d5-4e5c-a557-447dd75426f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.084170] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for the task: (returnval){ [ 830.084170] env[61868]: value = "task-40977" [ 830.084170] env[61868]: _type = "Task" [ 830.084170] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.106919] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Task: {'id': task-40977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.416933] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1dd17edf-740f-4dd6-8e3f-88ca65d418b5 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] Acquiring lock "13ffc1dc-81be-40bc-94cf-b9ac06d98511" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 830.417293] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1dd17edf-740f-4dd6-8e3f-88ca65d418b5 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] Lock "13ffc1dc-81be-40bc-94cf-b9ac06d98511" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 830.473795] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 830.474064] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Creating directory with path [datastore2] vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.474310] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bb34bd8-e97a-425c-9a20-2fcd9d669249 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.487036] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Created directory with path [datastore2] vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.487244] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Fetch image to [datastore2] vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 830.487407] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 830.492502] env[61868]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c787fad-b20e-4b3c-8843-add7b5b9f9d4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.505283] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8b4f43-edbf-438f-b536-8d7bf90dc81d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.515544] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0765905-b152-4d05-8003-53cbadd778de {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.547388] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef40bcd0-1332-493e-b9a6-ea98dfd05aed {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.553980] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f5e1b623-bcc2-44b6-963c-c111ecd7592b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.575026] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 830.596072] env[61868]: DEBUG oslo_vmware.api [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Task: {'id': task-40977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03363} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.599403] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.599594] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 830.599765] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.599929] env[61868]: INFO nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Took 0.58 seconds to destroy the instance on the hypervisor. 
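Both tasks in this flow (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) follow the same shape: submit the task, then poll its state until it either completes (logged with duration_secs) or surfaces a fault. A generic sketch of that polling loop (plain Python; poll_task is a stand-in for the PropertyCollector query oslo.vmware actually issues, so its return shape is an assumption):

import time

def wait_for_task(poll_task, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # Assumed shape: {'state': 'running'|'success'|'error',
        #                 'progress': int, 'error': str}
        info = poll_task()
        if info['state'] == 'success':
            return info  # e.g. task-40977 "completed successfully"
        if info['state'] == 'error':
            # oslo.vmware raises a translated fault (VimFaultException) here
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(interval)  # between the "progress is N%." poll lines
    raise TimeoutError(f'task did not complete within {timeout:.0f}s')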
[ 830.600175] env[61868]: DEBUG oslo.service.loopingcall [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.604155] env[61868]: DEBUG nova.compute.manager [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 830.604266] env[61868]: DEBUG nova.network.neutron [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 830.640545] env[61868]: DEBUG nova.network.neutron [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 830.647127] env[61868]: DEBUG oslo_vmware.rw_handles [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 830.650753] env[61868]: DEBUG nova.network.neutron [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.710964] env[61868]: INFO nova.compute.manager [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Took 0.11 seconds to deallocate network for instance. [ 830.713338] env[61868]: DEBUG nova.compute.claims [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 830.713557] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 830.713780] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 830.717257] env[61868]: DEBUG oslo_vmware.rw_handles [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Completed reading data from the image iterator. 
{{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 830.717425] env[61868]: DEBUG oslo_vmware.rw_handles [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 831.095263] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a99eaf1-c9de-4dd2-8615-923c96faa1ea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.103281] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e4db49-1fc1-4a11-89e7-d445085dfcfb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.142892] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0360215b-23fb-490f-8956-11370d8cc821 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.152602] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b5d8d1-b659-4fc5-96a1-b99e32443ee5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.170154] env[61868]: DEBUG nova.compute.provider_tree [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.180695] env[61868]: DEBUG nova.scheduler.client.report [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 831.212358] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.498s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.212905] env[61868]: ERROR nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Failed 
to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 831.212905] env[61868]: Faults: ['InvalidArgument'] [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Traceback (most recent call last): [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self.driver.spawn(context, instance, image_meta, [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self._vmops.spawn(context, instance, image_meta, injected_files, [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self._fetch_image_if_missing(context, vi) [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] image_cache(vi, tmp_image_ds_loc) [ 831.212905] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] vm_util.copy_virtual_disk( [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] session._wait_for_task(vmdk_copy_task) [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] return self.wait_for_task(task_ref) [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] return evt.wait() [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] result = hub.switch() [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] 
return self.greenlet.switch() [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 831.213339] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] self.f(*self.args, **self.kw) [ 831.213705] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 831.213705] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] raise exceptions.translate_fault(task_info.error) [ 831.213705] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 831.213705] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Faults: ['InvalidArgument'] [ 831.213705] env[61868]: ERROR nova.compute.manager [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] [ 831.213705] env[61868]: DEBUG nova.compute.utils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 831.215652] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Build of instance 05c290e0-e98f-4f66-9e2c-f7d21992bb88 was re-scheduled: A specified parameter was not correct: fileType [ 831.215652] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 831.216519] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 831.216773] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 831.216974] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquired lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 831.217086] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 831.258058] 
env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 831.323889] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.336327] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Releasing lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 831.336327] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 831.336327] env[61868]: DEBUG nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 831.336327] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 831.372836] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 831.381336] env[61868]: DEBUG nova.network.neutron [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.390237] env[61868]: INFO nova.compute.manager [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Took 0.05 seconds to deallocate network for instance. 
[ 831.503128] env[61868]: INFO nova.scheduler.client.report [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Deleted allocations for instance 05c290e0-e98f-4f66-9e2c-f7d21992bb88 [ 831.526534] env[61868]: DEBUG oslo_concurrency.lockutils [None req-24ba5ae3-4b0f-4744-822d-f8d54efe093b tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.411s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.527720] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 43.907s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 831.527958] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 831.528175] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 831.528341] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.530399] env[61868]: INFO nova.compute.manager [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Terminating instance [ 831.532074] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquiring lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 831.532342] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Acquired lock 
"refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 831.532456] env[61868]: DEBUG nova.network.neutron [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 831.549977] env[61868]: DEBUG nova.compute.manager [None req-395ee023-b17e-4a59-b830-7a86389c0fc3 tempest-ServerDiagnosticsNegativeTest-262233249 tempest-ServerDiagnosticsNegativeTest-262233249-project-member] [instance: 24d274d6-04c7-4f4a-941b-31c539054dc5] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 831.589562] env[61868]: DEBUG nova.compute.manager [None req-395ee023-b17e-4a59-b830-7a86389c0fc3 tempest-ServerDiagnosticsNegativeTest-262233249 tempest-ServerDiagnosticsNegativeTest-262233249-project-member] [instance: 24d274d6-04c7-4f4a-941b-31c539054dc5] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 831.599257] env[61868]: DEBUG nova.network.neutron [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 831.623291] env[61868]: DEBUG oslo_concurrency.lockutils [None req-395ee023-b17e-4a59-b830-7a86389c0fc3 tempest-ServerDiagnosticsNegativeTest-262233249 tempest-ServerDiagnosticsNegativeTest-262233249-project-member] Lock "24d274d6-04c7-4f4a-941b-31c539054dc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.708s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.651071] env[61868]: DEBUG nova.compute.manager [None req-5472dfff-c8d5-41da-9320-d3ced8109709 tempest-ServersWithSpecificFlavorTestJSON-818828595 tempest-ServersWithSpecificFlavorTestJSON-818828595-project-member] [instance: 0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 831.682014] env[61868]: DEBUG nova.network.neutron [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.687631] env[61868]: DEBUG nova.compute.manager [None req-5472dfff-c8d5-41da-9320-d3ced8109709 tempest-ServersWithSpecificFlavorTestJSON-818828595 tempest-ServersWithSpecificFlavorTestJSON-818828595-project-member] [instance: 0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 831.692394] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Releasing lock "refresh_cache-05c290e0-e98f-4f66-9e2c-f7d21992bb88" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 831.693055] env[61868]: DEBUG nova.compute.manager [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 831.693109] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 831.693981] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa90be1d-aa75-4eca-81ae-7f80ca1fe0b9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.704049] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f18fb6-f0e3-4bb1-b0e3-5e1f62aaa57e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.746593] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 05c290e0-e98f-4f66-9e2c-f7d21992bb88 could not be found. [ 831.746814] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 831.747022] env[61868]: INFO nova.compute.manager [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Took 0.05 seconds to destroy the instance on the hypervisor. [ 831.747276] env[61868]: DEBUG oslo.service.loopingcall [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.752591] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5472dfff-c8d5-41da-9320-d3ced8109709 tempest-ServersWithSpecificFlavorTestJSON-818828595 tempest-ServersWithSpecificFlavorTestJSON-818828595-project-member] Lock "0e6a35a2-c4cf-4e4a-9f87-45d0d4c5fead" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.709s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.752883] env[61868]: DEBUG nova.compute.manager [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 831.753002] env[61868]: DEBUG nova.network.neutron [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 831.771382] env[61868]: DEBUG nova.compute.manager [None req-e0f96591-7ddd-46a5-b530-a359f1da089b tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: a19a14cd-b92d-4b3d-a06a-75b59333d1af] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 831.792619] env[61868]: DEBUG nova.network.neutron [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 831.799704] env[61868]: DEBUG nova.network.neutron [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.802387] env[61868]: DEBUG nova.compute.manager [None req-e0f96591-7ddd-46a5-b530-a359f1da089b tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: a19a14cd-b92d-4b3d-a06a-75b59333d1af] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 831.808084] env[61868]: INFO nova.compute.manager [-] [instance: 05c290e0-e98f-4f66-9e2c-f7d21992bb88] Took 0.06 seconds to deallocate network for instance. [ 831.829367] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e0f96591-7ddd-46a5-b530-a359f1da089b tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "a19a14cd-b92d-4b3d-a06a-75b59333d1af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.404s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.842867] env[61868]: DEBUG nova.compute.manager [None req-1a3ccf20-aa51-4cab-a664-b563a9fc7ddc tempest-FloatingIPsAssociationTestJSON-873127927 tempest-FloatingIPsAssociationTestJSON-873127927-project-member] [instance: 46a45c39-ab23-4918-9d7e-84093c2b7ce8] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 831.934952] env[61868]: DEBUG nova.compute.manager [None req-1a3ccf20-aa51-4cab-a664-b563a9fc7ddc tempest-FloatingIPsAssociationTestJSON-873127927 tempest-FloatingIPsAssociationTestJSON-873127927-project-member] [instance: 46a45c39-ab23-4918-9d7e-84093c2b7ce8] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 831.962152] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a3ccf20-aa51-4cab-a664-b563a9fc7ddc tempest-FloatingIPsAssociationTestJSON-873127927 tempest-FloatingIPsAssociationTestJSON-873127927-project-member] Lock "46a45c39-ab23-4918-9d7e-84093c2b7ce8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.156s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.977904] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b417360a-544d-41b8-bee0-eaf5568905b4 tempest-ServerExternalEventsTest-1986103716 tempest-ServerExternalEventsTest-1986103716-project-member] Lock "05c290e0-e98f-4f66-9e2c-f7d21992bb88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.450s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.993066] env[61868]: DEBUG nova.compute.manager [None req-d5f069d2-e302-47af-9f35-877fffa7d02d tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] [instance: ac5d7c83-1d3c-46b8-af48-4ec6c57ad070] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.028843] env[61868]: DEBUG nova.compute.manager [None req-d5f069d2-e302-47af-9f35-877fffa7d02d tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] [instance: ac5d7c83-1d3c-46b8-af48-4ec6c57ad070] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.071740] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d5f069d2-e302-47af-9f35-877fffa7d02d tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] Lock "ac5d7c83-1d3c-46b8-af48-4ec6c57ad070" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.947s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.087384] env[61868]: DEBUG nova.compute.manager [None req-5286c1cd-9f99-47a2-876f-aef21f283c0a tempest-ServerDiagnosticsV248Test-873728587 tempest-ServerDiagnosticsV248Test-873728587-project-member] [instance: 9cb259e7-5102-4610-92ba-f30bbbcdbd9c] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.122630] env[61868]: DEBUG nova.compute.manager [None req-5286c1cd-9f99-47a2-876f-aef21f283c0a tempest-ServerDiagnosticsV248Test-873728587 tempest-ServerDiagnosticsV248Test-873728587-project-member] [instance: 9cb259e7-5102-4610-92ba-f30bbbcdbd9c] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.168436] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5286c1cd-9f99-47a2-876f-aef21f283c0a tempest-ServerDiagnosticsV248Test-873728587 tempest-ServerDiagnosticsV248Test-873728587-project-member] Lock "9cb259e7-5102-4610-92ba-f30bbbcdbd9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.793s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.186785] env[61868]: DEBUG nova.compute.manager [None req-0ba98e26-1c67-411c-9ced-6ae7de9f92b5 tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] [instance: 32ef9425-a629-4284-81d2-0dfa848e4420] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.226830] env[61868]: DEBUG nova.compute.manager [None req-0ba98e26-1c67-411c-9ced-6ae7de9f92b5 tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] [instance: 32ef9425-a629-4284-81d2-0dfa848e4420] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.265937] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0ba98e26-1c67-411c-9ced-6ae7de9f92b5 tempest-ServersAdminTestJSON-526196414 tempest-ServersAdminTestJSON-526196414-project-member] Lock "32ef9425-a629-4284-81d2-0dfa848e4420" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.552s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.292792] env[61868]: DEBUG nova.compute.manager [None req-54f023d9-dbb2-4d6d-b699-302b19f4af18 tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] [instance: 7b2e8196-5ec1-4cef-9183-2d4f43639a47] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.325413] env[61868]: DEBUG nova.compute.manager [None req-54f023d9-dbb2-4d6d-b699-302b19f4af18 tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] [instance: 7b2e8196-5ec1-4cef-9183-2d4f43639a47] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.368574] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54f023d9-dbb2-4d6d-b699-302b19f4af18 tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] Lock "7b2e8196-5ec1-4cef-9183-2d4f43639a47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.975s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.382574] env[61868]: DEBUG nova.compute.manager [None req-c98c0f0d-fa8f-4cf9-bf7a-9b3c044c1601 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] [instance: a9f648e5-c026-4bcf-a4b4-81cfbc5532a3] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.416969] env[61868]: DEBUG nova.compute.manager [None req-c98c0f0d-fa8f-4cf9-bf7a-9b3c044c1601 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] [instance: a9f648e5-c026-4bcf-a4b4-81cfbc5532a3] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.453601] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c98c0f0d-fa8f-4cf9-bf7a-9b3c044c1601 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] Lock "a9f648e5-c026-4bcf-a4b4-81cfbc5532a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.188s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.476207] env[61868]: DEBUG nova.compute.manager [None req-aa931c47-f04b-4254-9c88-7bb57e56c6c5 tempest-ServerMetadataNegativeTestJSON-1980073815 tempest-ServerMetadataNegativeTestJSON-1980073815-project-member] [instance: 7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.513979] env[61868]: DEBUG nova.compute.manager [None req-aa931c47-f04b-4254-9c88-7bb57e56c6c5 tempest-ServerMetadataNegativeTestJSON-1980073815 tempest-ServerMetadataNegativeTestJSON-1980073815-project-member] [instance: 7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.571849] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aa931c47-f04b-4254-9c88-7bb57e56c6c5 tempest-ServerMetadataNegativeTestJSON-1980073815 tempest-ServerMetadataNegativeTestJSON-1980073815-project-member] Lock "7c1bddc6-f5e5-4691-9ef6-e095d4f56ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.732s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.592139] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquiring lock "98afcb58-e992-469d-a8fd-94c5eaf69b31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 832.592139] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "98afcb58-e992-469d-a8fd-94c5eaf69b31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 832.615700] env[61868]: DEBUG nova.compute.manager [None req-1e9e2edd-7dd7-45cb-bf19-a0476ea30251 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 80be1533-08c9-4a44-975b-90ed5ac5402a] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.658796] env[61868]: DEBUG nova.compute.manager [None req-1e9e2edd-7dd7-45cb-bf19-a0476ea30251 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 80be1533-08c9-4a44-975b-90ed5ac5402a] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.691767] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1e9e2edd-7dd7-45cb-bf19-a0476ea30251 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "80be1533-08c9-4a44-975b-90ed5ac5402a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.078s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.708691] env[61868]: DEBUG nova.compute.manager [None req-4014b24a-6e17-4676-883e-f97ef382734d tempest-ServersTestFqdnHostnames-466634136 tempest-ServersTestFqdnHostnames-466634136-project-member] [instance: 727c1245-a258-4b71-93bf-10977d80b3ff] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.744901] env[61868]: DEBUG nova.compute.manager [None req-4014b24a-6e17-4676-883e-f97ef382734d tempest-ServersTestFqdnHostnames-466634136 tempest-ServersTestFqdnHostnames-466634136-project-member] [instance: 727c1245-a258-4b71-93bf-10977d80b3ff] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 832.771248] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4014b24a-6e17-4676-883e-f97ef382734d tempest-ServersTestFqdnHostnames-466634136 tempest-ServersTestFqdnHostnames-466634136-project-member] Lock "727c1245-a258-4b71-93bf-10977d80b3ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.345s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.785120] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 832.843482] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 832.843753] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 832.845435] env[61868]: INFO nova.compute.claims [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.290649] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e048d3-85de-442d-b05a-01e969a64dc7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.298759] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fa0a88-8591-48ce-8610-b841a584478a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.331353] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a281b87-429c-4545-960a-5875fb7f219e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.340103] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b994a415-0a9b-4b26-903c-45ddeff5baeb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.356163] env[61868]: DEBUG nova.compute.provider_tree [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.365722] env[61868]: DEBUG nova.scheduler.client.report [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 833.402200] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.558s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 833.402489] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 833.464425] env[61868]: DEBUG nova.compute.utils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.466280] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 833.466454] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 833.480476] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 833.556776] env[61868]: DEBUG nova.policy [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f43bbfc89e854ba5ae03f03e1a98c155', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39fd476d0f164b4695fe920d42018521', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 833.605748] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 833.632408] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 833.632743] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 833.632914] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.633097] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 833.633242] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.633385] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 833.633595] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 833.633773] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 833.634047] env[61868]: DEBUG nova.virt.hardware [None 
req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 833.634116] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 833.634271] env[61868]: DEBUG nova.virt.hardware [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 833.635164] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d6af16-da9b-40ae-b498-5377ea33f05d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.645370] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41381847-0bd8-41d6-87d0-bcc37b05afac {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.123911] env[61868]: DEBUG oslo_concurrency.lockutils [None req-be2532e6-53db-462e-9d99-f7efa5c5c033 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "f630ce29-dcb1-4f1c-9dc4-0cb246e6f1cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 834.124198] env[61868]: DEBUG oslo_concurrency.lockutils [None req-be2532e6-53db-462e-9d99-f7efa5c5c033 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f630ce29-dcb1-4f1c-9dc4-0cb246e6f1cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 834.295776] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Successfully created port: c26c1295-f169-4877-b05c-c78c678b1481 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.883569] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Successfully updated port: c26c1295-f169-4877-b05c-c78c678b1481 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.895598] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 835.895598] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 835.895750] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 836.218717] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "972ab1c7-03b0-4294-930c-8084674083ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 836.236206] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 836.242827] env[61868]: DEBUG nova.compute.manager [req-9fd0826c-8686-4d10-8e8b-09bcbbfe4422 req-5c2e8803-a3a3-421a-a38a-2af61c9a8cec service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Received event network-vif-plugged-c26c1295-f169-4877-b05c-c78c678b1481 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 836.243449] env[61868]: DEBUG oslo_concurrency.lockutils [req-9fd0826c-8686-4d10-8e8b-09bcbbfe4422 req-5c2e8803-a3a3-421a-a38a-2af61c9a8cec service nova] Acquiring lock "972ab1c7-03b0-4294-930c-8084674083ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 836.243555] env[61868]: DEBUG oslo_concurrency.lockutils [req-9fd0826c-8686-4d10-8e8b-09bcbbfe4422 req-5c2e8803-a3a3-421a-a38a-2af61c9a8cec service nova] Lock "972ab1c7-03b0-4294-930c-8084674083ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 836.243835] env[61868]: DEBUG oslo_concurrency.lockutils [req-9fd0826c-8686-4d10-8e8b-09bcbbfe4422 req-5c2e8803-a3a3-421a-a38a-2af61c9a8cec service nova] Lock "972ab1c7-03b0-4294-930c-8084674083ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 836.244162] env[61868]: DEBUG nova.compute.manager [req-9fd0826c-8686-4d10-8e8b-09bcbbfe4422 req-5c2e8803-a3a3-421a-a38a-2af61c9a8cec service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] No waiting events found dispatching network-vif-plugged-c26c1295-f169-4877-b05c-c78c678b1481 {{(pid=61868) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 836.244451] env[61868]: WARNING nova.compute.manager [req-9fd0826c-8686-4d10-8e8b-09bcbbfe4422 req-5c2e8803-a3a3-421a-a38a-2af61c9a8cec service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Received unexpected event network-vif-plugged-c26c1295-f169-4877-b05c-c78c678b1481 for instance with vm_state building and task_state deleting. [ 836.794631] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Updating instance_info_cache with network_info: [{"id": "c26c1295-f169-4877-b05c-c78c678b1481", "address": "fa:16:3e:16:dd:22", "network": {"id": "b71fb302-0271-4727-ba20-75991d2de70e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1625994717-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "39fd476d0f164b4695fe920d42018521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26c1295-f1", "ovs_interfaceid": "c26c1295-f169-4877-b05c-c78c678b1481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.817542] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Releasing lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 836.817880] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance network_info: |[{"id": "c26c1295-f169-4877-b05c-c78c678b1481", "address": "fa:16:3e:16:dd:22", "network": {"id": "b71fb302-0271-4727-ba20-75991d2de70e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1625994717-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "39fd476d0f164b4695fe920d42018521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26c1295-f1", "ovs_interfaceid": 
"c26c1295-f169-4877-b05c-c78c678b1481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 836.818334] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:dd:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c26c1295-f169-4877-b05c-c78c678b1481', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.828509] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating folder: Project (39fd476d0f164b4695fe920d42018521). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 836.829309] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-218117b8-b06a-4695-96bf-99107bff73b8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.844053] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Created folder: Project (39fd476d0f164b4695fe920d42018521) in parent group-v18181. [ 836.844287] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating folder: Instances. Parent ref: group-v18233. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 836.844553] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fca66a1f-c9f1-452f-876d-8658ba9dface {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.860034] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Created folder: Instances in parent group-v18233. [ 836.860034] env[61868]: DEBUG oslo.service.loopingcall [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.860034] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 836.860034] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d685b31-a181-4299-97d1-15b2a65481af {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.895592] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.895592] env[61868]: value = "task-40983" [ 836.895592] env[61868]: _type = "Task" [ 836.895592] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.904912] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40983, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.405254] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40983, 'name': CreateVM_Task, 'duration_secs': 0.490874} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.405498] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 837.406311] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 837.406532] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 837.409364] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef48fe6-6a12-4c84-935b-2e06798a7ac1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.446140] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Reconfiguring VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 837.446689] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-139eef86-e019-4a4f-a6b9-101d1e502c6f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.464234] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 
tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 837.464234] env[61868]: value = "task-40985" [ 837.464234] env[61868]: _type = "Task" [ 837.464234] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.473737] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-40985, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.923972] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 837.924309] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 837.974352] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-40985, 'name': ReconfigVM_Task, 'duration_secs': 0.117623} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.974677] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Reconfigured VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 837.974923] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.568s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 837.975214] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 837.975403] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 837.975752] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 837.976061] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-383f554e-6109-4caf-a8cb-e01184dfa4a9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.981024] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 837.981024] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f6225b-606e-2982-860e-976c0833636b" [ 837.981024] env[61868]: _type = "Task" [ 837.981024] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.989508] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f6225b-606e-2982-860e-976c0833636b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.491859] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 838.492165] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.492390] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 840.116464] env[61868]: DEBUG nova.compute.manager [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Received event network-changed-c26c1295-f169-4877-b05c-c78c678b1481 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 840.116762] env[61868]: DEBUG nova.compute.manager [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Refreshing instance network info cache due to event network-changed-c26c1295-f169-4877-b05c-c78c678b1481. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 840.116896] env[61868]: DEBUG oslo_concurrency.lockutils [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] Acquiring lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 840.117093] env[61868]: DEBUG oslo_concurrency.lockutils [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] Acquired lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 840.117364] env[61868]: DEBUG nova.network.neutron [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Refreshing network info cache for port c26c1295-f169-4877-b05c-c78c678b1481 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 840.876494] env[61868]: DEBUG nova.network.neutron [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Updated VIF entry in instance network info cache for port c26c1295-f169-4877-b05c-c78c678b1481. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 840.876855] env[61868]: DEBUG nova.network.neutron [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Updating instance_info_cache with network_info: [{"id": "c26c1295-f169-4877-b05c-c78c678b1481", "address": "fa:16:3e:16:dd:22", "network": {"id": "b71fb302-0271-4727-ba20-75991d2de70e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1625994717-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "39fd476d0f164b4695fe920d42018521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26c1295-f1", "ovs_interfaceid": "c26c1295-f169-4877-b05c-c78c678b1481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.887020] env[61868]: DEBUG oslo_concurrency.lockutils [req-c50e34aa-9577-48fc-b4f8-26e2025735e3 req-29a55ed0-4e7f-497a-9c00-7e44e4a24924 service nova] Releasing lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 843.867735] env[61868]: DEBUG oslo_concurrency.lockutils [None req-55e1f196-e08b-4869-bb81-05aaddbc2d3c tempest-FloatingIPsAssociationNegativeTestJSON-2012629252 tempest-FloatingIPsAssociationNegativeTestJSON-2012629252-project-member] Acquiring lock "0ef31ff5-f19f-44b0-abb2-b92c76ee9b15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 843.868202] env[61868]: DEBUG oslo_concurrency.lockutils [None req-55e1f196-e08b-4869-bb81-05aaddbc2d3c tempest-FloatingIPsAssociationNegativeTestJSON-2012629252 tempest-FloatingIPsAssociationNegativeTestJSON-2012629252-project-member] Lock "0ef31ff5-f19f-44b0-abb2-b92c76ee9b15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 844.626604] env[61868]: DEBUG oslo_concurrency.lockutils [None req-06af30fa-5c66-4634-80e9-e2df74027eb2 tempest-AttachInterfacesUnderV243Test-1999617247 tempest-AttachInterfacesUnderV243Test-1999617247-project-member] Acquiring lock "02387068-d9d8-4a13-b819-e1b9ef57a5d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 844.626914] env[61868]: DEBUG oslo_concurrency.lockutils [None req-06af30fa-5c66-4634-80e9-e2df74027eb2 tempest-AttachInterfacesUnderV243Test-1999617247 
tempest-AttachInterfacesUnderV243Test-1999617247-project-member] Lock "02387068-d9d8-4a13-b819-e1b9ef57a5d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 850.389984] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03d2d008-c03c-4f42-96fd-45e52bd1c08d tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] Acquiring lock "7958ae5c-6db5-4491-8e32-cfd2cab686d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 850.390293] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03d2d008-c03c-4f42-96fd-45e52bd1c08d tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] Lock "7958ae5c-6db5-4491-8e32-cfd2cab686d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 850.480524] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a4a72449-fbeb-40e3-a944-4ba957282988 tempest-ImagesOneServerTestJSON-1138467028 tempest-ImagesOneServerTestJSON-1138467028-project-member] Acquiring lock "62c7b41d-1426-408b-a650-4f567171256a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 850.480841] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a4a72449-fbeb-40e3-a944-4ba957282988 tempest-ImagesOneServerTestJSON-1138467028 tempest-ImagesOneServerTestJSON-1138467028-project-member] Lock "62c7b41d-1426-408b-a650-4f567171256a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 852.538392] env[61868]: DEBUG oslo_concurrency.lockutils [None req-48b104b9-ac6d-42ee-87e0-02e3e28595b8 tempest-TaggedBootDevicesTest_v242-294738673 tempest-TaggedBootDevicesTest_v242-294738673-project-member] Acquiring lock "ad906b04-0b02-444a-b837-acde1c22df43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 852.538803] env[61868]: DEBUG oslo_concurrency.lockutils [None req-48b104b9-ac6d-42ee-87e0-02e3e28595b8 tempest-TaggedBootDevicesTest_v242-294738673 tempest-TaggedBootDevicesTest_v242-294738673-project-member] Lock "ad906b04-0b02-444a-b837-acde1c22df43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 853.480473] env[61868]: DEBUG oslo_concurrency.lockutils [None req-86c442aa-c83d-4fea-a456-6b3c2ce5597b tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] Acquiring lock "bf46241a-b11c-46e7-b463-c48bc83c8ab2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 853.480748] env[61868]: DEBUG oslo_concurrency.lockutils [None req-86c442aa-c83d-4fea-a456-6b3c2ce5597b tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] Lock "bf46241a-b11c-46e7-b463-c48bc83c8ab2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 853.983679] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1ca8d454-42a0-4e1e-ae4c-d5eb4229b8da tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] Acquiring lock "a085b185-6663-460d-b2c8-9acee0a89311" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 853.984075] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1ca8d454-42a0-4e1e-ae4c-d5eb4229b8da tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] Lock "a085b185-6663-460d-b2c8-9acee0a89311" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 854.386206] env[61868]: DEBUG oslo_concurrency.lockutils [None req-30236137-5264-49e2-96b4-5c2bf3dbf2e7 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "70ad4510-e8e3-4c1a-a58d-d76822248b9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 854.386438] env[61868]: DEBUG oslo_concurrency.lockutils [None req-30236137-5264-49e2-96b4-5c2bf3dbf2e7 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "70ad4510-e8e3-4c1a-a58d-d76822248b9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 855.413202] env[61868]: DEBUG oslo_concurrency.lockutils [None req-02353919-5328-4d42-b719-185c973ab754 tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] Acquiring lock "57439eb9-5e3d-49e7-a634-24cb78d86c99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 855.413449] env[61868]: DEBUG oslo_concurrency.lockutils [None req-02353919-5328-4d42-b719-185c973ab754 tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] Lock "57439eb9-5e3d-49e7-a634-24cb78d86c99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 857.310124] env[61868]: DEBUG oslo_concurrency.lockutils [None req-037fe429-ed0c-4ba7-a582-82bf5ef41e71 
tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Acquiring lock "9a7f9e74-a298-4eee-bf31-153d671ab91a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 857.310422] env[61868]: DEBUG oslo_concurrency.lockutils [None req-037fe429-ed0c-4ba7-a582-82bf5ef41e71 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Lock "9a7f9e74-a298-4eee-bf31-153d671ab91a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 878.349788] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.414380] env[61868]: WARNING oslo_vmware.rw_handles [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 879.414380] env[61868]: ERROR oslo_vmware.rw_handles [ 879.415222] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 879.416760] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 879.416970] env[61868]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Copying Virtual Disk [datastore2] vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/6fbae0ef-f049-4da1-bf76-c201e0da09d6/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 879.417250] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-008b7761-8d8b-4eef-a38c-bde3f99a52cd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.428029] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for the task: (returnval){ [ 879.428029] env[61868]: value = "task-40987" [ 879.428029] env[61868]: _type = "Task" [ 879.428029] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.434521] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Task: {'id': task-40987, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.937173] env[61868]: DEBUG oslo_vmware.exceptions [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 879.937490] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 879.938114] env[61868]: ERROR nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 879.938114] env[61868]: Faults: ['InvalidArgument'] [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Traceback (most recent call last): [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] yield resources [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self.driver.spawn(context, instance, image_meta, [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self._fetch_image_if_missing(context, vi) [ 879.938114] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] image_cache(vi, tmp_image_ds_loc) [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] vm_util.copy_virtual_disk( [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] session._wait_for_task(vmdk_copy_task) [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] return self.wait_for_task(task_ref) [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] return evt.wait() [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] result = hub.switch() [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 879.938555] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] return self.greenlet.switch() [ 879.939039] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 879.939039] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self.f(*self.args, **self.kw) [ 879.939039] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 879.939039] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] raise exceptions.translate_fault(task_info.error) [ 879.939039] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 879.939039] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Faults: ['InvalidArgument'] [ 879.939039] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] [ 879.939039] env[61868]: INFO nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Terminating instance [ 879.940119] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 879.940339] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.940569] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33985a92-6df8-44f9-9fe9-9db5beae89be {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.942807] 
env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 879.942963] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquired lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 879.943130] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 879.950843] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.950944] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 879.951724] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3db795-9183-448b-bd83-08c99a5dd901 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.959274] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Waiting for the task: (returnval){ [ 879.959274] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52c07132-a88b-e08f-6844-4952570b85a4" [ 879.959274] env[61868]: _type = "Task" [ 879.959274] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.968525] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52c07132-a88b-e08f-6844-4952570b85a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.979390] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 880.005576] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.015581] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Releasing lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 880.016012] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 880.016219] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 880.017396] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063b93e1-596c-4635-a06f-39115aa7e635 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.027046] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 880.027298] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c14255b6-1e29-45b3-8daa-e780976d3ad9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.059846] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 880.060378] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 880.060571] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Deleting the datastore file [datastore2] c1efc2dd-6474-4fba-a00e-f104f0d446de {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
880.060879] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25e8c9e2-0725-4dc7-a56f-ea559a9cfa5c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.067567] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for the task: (returnval){ [ 880.067567] env[61868]: value = "task-40989" [ 880.067567] env[61868]: _type = "Task" [ 880.067567] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.078227] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Task: {'id': task-40989, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.473348] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 880.474023] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Creating directory with path [datastore2] vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.474416] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8a7a199-4bd5-40af-8ea2-453512f31112 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.487121] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Created directory with path [datastore2] vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.487617] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Fetch image to [datastore2] vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 880.487963] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 880.488909] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf2266e-1629-4836-8f62-6c1d11786d70 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.496280] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1a1cba-3714-47cc-8061-de8ffcb834c4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.506783] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fe055e-55f6-4b02-adc2-7ec1843ca425 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.540574] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f183c08-f3c5-4e7c-a02d-d2b92534a060 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.547168] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-94db6484-302f-4e42-b85d-794940de43b8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.569404] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 880.580698] env[61868]: DEBUG oslo_vmware.api [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Task: {'id': task-40989, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043832} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.581136] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.581931] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 880.582192] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 880.582354] env[61868]: INFO nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Took 0.57 seconds to destroy the instance on the hypervisor. 
[ 880.582602] env[61868]: DEBUG oslo.service.loopingcall [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.582911] env[61868]: DEBUG nova.compute.manager [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 880.583020] env[61868]: DEBUG nova.network.neutron [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 880.603597] env[61868]: DEBUG nova.network.neutron [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 880.611503] env[61868]: DEBUG nova.network.neutron [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.619470] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 880.621977] env[61868]: INFO nova.compute.manager [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Took 0.04 seconds to deallocate network for instance. [ 880.625027] env[61868]: DEBUG nova.compute.claims [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 880.625027] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 880.625227] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 880.690428] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Completed reading data from the image iterator. 
{{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 880.690870] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 881.042709] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43675165-459a-479c-be13-c97b99f4c40e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.050745] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1782b7-a835-44da-99ad-f99cc836bfa7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.082116] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2281461b-e586-4482-95c5-247132744e81 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.090416] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252643d4-ff55-4df0-b65a-bf3ba1bbc038 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.104181] env[61868]: DEBUG nova.compute.provider_tree [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.113357] env[61868]: DEBUG nova.scheduler.client.report [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 881.133318] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.508s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.133892] env[61868]: ERROR nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Failed to build and run instance: 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 881.133892] env[61868]: Faults: ['InvalidArgument'] [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Traceback (most recent call last): [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self.driver.spawn(context, instance, image_meta, [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self._fetch_image_if_missing(context, vi) [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] image_cache(vi, tmp_image_ds_loc) [ 881.133892] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] vm_util.copy_virtual_disk( [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] session._wait_for_task(vmdk_copy_task) [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] return self.wait_for_task(task_ref) [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] return evt.wait() [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] result = hub.switch() [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] return 
self.greenlet.switch() [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 881.134352] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] self.f(*self.args, **self.kw) [ 881.134769] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 881.134769] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] raise exceptions.translate_fault(task_info.error) [ 881.134769] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 881.134769] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Faults: ['InvalidArgument'] [ 881.134769] env[61868]: ERROR nova.compute.manager [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] [ 881.134769] env[61868]: DEBUG nova.compute.utils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 881.136139] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Build of instance c1efc2dd-6474-4fba-a00e-f104f0d446de was re-scheduled: A specified parameter was not correct: fileType [ 881.136139] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 881.136541] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 881.137025] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 881.137137] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquired lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 881.137511] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 881.163823] env[61868]: DEBUG nova.network.neutron [None 
req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 881.189128] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.198391] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Releasing lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 881.198474] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 881.198638] env[61868]: DEBUG nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 881.198798] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 881.218368] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 881.226449] env[61868]: DEBUG nova.network.neutron [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.234769] env[61868]: INFO nova.compute.manager [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Took 0.04 seconds to deallocate network for instance. 
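The traceback above shows the path the "InvalidArgument: fileType" fault takes out of vCenter: oslo_vmware's wait_for_task() runs a looping call that polls the CopyVirtualDisk_Task, and when the task reaches an error state, _poll_task raises the fault translated into a VimFaultException. A minimal sketch of that polling contract, in plain Python; the get_task_info callback and the simplified exception class below are illustrative stand-ins, not the real oslo_vmware API:

    import time

    class VimFaultException(Exception):
        # Simplified stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the vCenter task reaches a terminal state; on error,
        # surface the VIM fault, mirroring api.py _poll_task in the traceback.
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise VimFaultException(info['faults'], info['message'])
            time.sleep(poll_interval)

    # Reproduce the failure mode logged above for the disk-copy task:
    states = iter([
        {'state': 'running'},
        {'state': 'error', 'faults': ['InvalidArgument'],
         'message': 'A specified parameter was not correct: fileType'},
    ])
    try:
        wait_for_task(lambda: next(states), poll_interval=0)
    except VimFaultException as exc:
        print(exc.fault_list, exc)  # ['InvalidArgument'] A specified parameter ...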
[ 881.336529] env[61868]: INFO nova.scheduler.client.report [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Deleted allocations for instance c1efc2dd-6474-4fba-a00e-f104f0d446de [ 881.357884] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b895d21-c392-4fa6-a878-a5eba65b4087 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "c1efc2dd-6474-4fba-a00e-f104f0d446de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 291.780s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.359284] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "c1efc2dd-6474-4fba-a00e-f104f0d446de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 93.445s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 881.359504] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "c1efc2dd-6474-4fba-a00e-f104f0d446de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 881.359711] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "c1efc2dd-6474-4fba-a00e-f104f0d446de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 881.359882] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "c1efc2dd-6474-4fba-a00e-f104f0d446de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.362339] env[61868]: INFO nova.compute.manager [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Terminating instance [ 881.364251] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 881.364405] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquired lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 881.364572] env[61868]: DEBUG nova.network.neutron [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 881.391089] env[61868]: DEBUG nova.compute.manager [None req-d98d5c1d-1870-4531-98e2-6c0ae2d7f177 tempest-ImagesOneServerNegativeTestJSON-914211067 tempest-ImagesOneServerNegativeTestJSON-914211067-project-member] [instance: 18918990-c7b2-40b4-9683-ae0635fcc367] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.394274] env[61868]: DEBUG nova.network.neutron [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 881.424974] env[61868]: DEBUG nova.compute.manager [None req-d98d5c1d-1870-4531-98e2-6c0ae2d7f177 tempest-ImagesOneServerNegativeTestJSON-914211067 tempest-ImagesOneServerNegativeTestJSON-914211067-project-member] [instance: 18918990-c7b2-40b4-9683-ae0635fcc367] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 881.443033] env[61868]: DEBUG nova.network.neutron [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.456117] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Releasing lock "refresh_cache-c1efc2dd-6474-4fba-a00e-f104f0d446de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 881.456117] env[61868]: DEBUG nova.compute.manager [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 881.456117] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 881.456117] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aab02192-05f4-4d86-a8c9-9b0935e98c24 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.459079] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d98d5c1d-1870-4531-98e2-6c0ae2d7f177 tempest-ImagesOneServerNegativeTestJSON-914211067 tempest-ImagesOneServerNegativeTestJSON-914211067-project-member] Lock "18918990-c7b2-40b4-9683-ae0635fcc367" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.468273] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f9e84f-bebb-4d74-ad8e-5c4de51a8fab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.479922] env[61868]: DEBUG nova.compute.manager [None req-a1dc6426-1e5e-48e4-af17-04227f7d0f9b tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] [instance: 34945829-cc74-4bae-9af7-99504a130e2b] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.502407] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1efc2dd-6474-4fba-a00e-f104f0d446de could not be found. [ 881.502629] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 881.502813] env[61868]: INFO nova.compute.manager [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Took 0.05 seconds to destroy the instance on the hypervisor. [ 881.503057] env[61868]: DEBUG oslo.service.loopingcall [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.503290] env[61868]: DEBUG nova.compute.manager [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 881.503385] env[61868]: DEBUG nova.network.neutron [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 881.508176] env[61868]: DEBUG nova.compute.manager [None req-a1dc6426-1e5e-48e4-af17-04227f7d0f9b tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] [instance: 34945829-cc74-4bae-9af7-99504a130e2b] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 881.524902] env[61868]: DEBUG nova.network.neutron [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 881.532676] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a1dc6426-1e5e-48e4-af17-04227f7d0f9b tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] Lock "34945829-cc74-4bae-9af7-99504a130e2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.332s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.535008] env[61868]: DEBUG nova.network.neutron [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.546156] env[61868]: DEBUG nova.compute.manager [None req-931bc777-7667-4ab3-b0f1-bd4defb8f838 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.549374] env[61868]: INFO nova.compute.manager [-] [instance: c1efc2dd-6474-4fba-a00e-f104f0d446de] Took 0.05 seconds to deallocate network for instance. [ 881.582789] env[61868]: DEBUG nova.compute.manager [None req-931bc777-7667-4ab3-b0f1-bd4defb8f838 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 881.612173] env[61868]: DEBUG oslo_concurrency.lockutils [None req-931bc777-7667-4ab3-b0f1-bd4defb8f838 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "62c9fb4b-cea9-4ab4-bef9-cd3d80bf5df5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.520s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.635635] env[61868]: DEBUG nova.compute.manager [None req-832b114b-a72a-48a9-b479-c761019aa7a5 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] [instance: 8cfd5561-08f1-49b8-b518-73104a987fc9] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.679005] env[61868]: DEBUG nova.compute.manager [None req-832b114b-a72a-48a9-b479-c761019aa7a5 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] [instance: 8cfd5561-08f1-49b8-b518-73104a987fc9] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 881.695852] env[61868]: DEBUG oslo_concurrency.lockutils [None req-519c3c40-2ba9-4cf1-9858-a12ac3da57cb tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "c1efc2dd-6474-4fba-a00e-f104f0d446de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.337s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.705665] env[61868]: DEBUG oslo_concurrency.lockutils [None req-832b114b-a72a-48a9-b479-c761019aa7a5 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Lock "8cfd5561-08f1-49b8-b518-73104a987fc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.590s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.716607] env[61868]: DEBUG nova.compute.manager [None req-fa446331-1e47-4e1d-8415-cf56bca350b4 tempest-AttachInterfacesV270Test-169603213 tempest-AttachInterfacesV270Test-169603213-project-member] [instance: f4eb7096-af70-4a46-8e9d-2b94a185afcb] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.744324] env[61868]: DEBUG nova.compute.manager [None req-fa446331-1e47-4e1d-8415-cf56bca350b4 tempest-AttachInterfacesV270Test-169603213 tempest-AttachInterfacesV270Test-169603213-project-member] [instance: f4eb7096-af70-4a46-8e9d-2b94a185afcb] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 881.768585] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa446331-1e47-4e1d-8415-cf56bca350b4 tempest-AttachInterfacesV270Test-169603213 tempest-AttachInterfacesV270Test-169603213-project-member] Lock "f4eb7096-af70-4a46-8e9d-2b94a185afcb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.938s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 881.781472] env[61868]: DEBUG nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 881.824291] env[61868]: ERROR nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Attempt to boot guest with tagged devices on host that does not support tagging.: nova.exception.BuildAbortException: Attempt to boot guest with tagged devices on host that does not support tagging. 
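The long lock waits in this trace (do_terminate_instance waited 93.445s for the per-instance lock that the failed build held for 291.780s) come from oslo.concurrency's lock decorator, which times the wait and hold and emits the "Acquiring ... / acquired ... waited Ns / released ... held Ns" triples seen throughout. A minimal sketch of that per-instance serialization pattern, assuming only that oslo.concurrency is installed; the function body and the UUID literal are illustrative:

    from oslo_concurrency import lockutils

    # Serialize build/terminate on one instance UUID. Only one greenthread
    # runs under this lock name at a time; the 93.445s "waited" above is
    # time a caller spent blocked at this decorator.
    @lockutils.synchronized('c1efc2dd-6474-4fba-a00e-f104f0d446de')
    def do_terminate_instance():
        pass  # terminate work would go here

    do_terminate_instance()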
[ 881.824541] env[61868]: DEBUG nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 881.824842] env[61868]: DEBUG oslo_concurrency.lockutils [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Acquiring lock "refresh_cache-7a2f7e4c-5dde-456d-bf41-c2d0cc507d17" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 881.825025] env[61868]: DEBUG oslo_concurrency.lockutils [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Acquired lock "refresh_cache-7a2f7e4c-5dde-456d-bf41-c2d0cc507d17" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 881.825239] env[61868]: DEBUG nova.network.neutron [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 881.851337] env[61868]: DEBUG nova.network.neutron [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 881.933815] env[61868]: DEBUG nova.network.neutron [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.944645] env[61868]: DEBUG oslo_concurrency.lockutils [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Releasing lock "refresh_cache-7a2f7e4c-5dde-456d-bf41-c2d0cc507d17" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 881.945084] env[61868]: DEBUG nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 881.945432] env[61868]: DEBUG nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 881.945765] env[61868]: DEBUG nova.network.neutron [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 881.964456] env[61868]: DEBUG nova.network.neutron [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 882.351381] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 882.636728] env[61868]: DEBUG nova.network.neutron [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.646229] env[61868]: INFO nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Took 0.70 seconds to deallocate network for instance. [ 882.694062] env[61868]: DEBUG nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Detaching volume: 44ab39ae-abdd-4754-a2cb-b71affdd7259 {{(pid=61868) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3225}} [ 882.694315] env[61868]: INFO nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Detaching volume 44ab39ae-abdd-4754-a2cb-b71affdd7259 [ 882.740209] env[61868]: DEBUG nova.virt.block_device [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Skipping driver_detach during remote rebuild. 
{{(pid=61868) _do_detach /opt/stack/nova/nova/virt/block_device.py:461}} [ 882.790042] env[61868]: WARNING nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Failed to detach volume: 44ab39ae-abdd-4754-a2cb-b71affdd7259 due to 'NoneType' object has no attribute 'devices' [ 882.790511] env[61868]: DEBUG nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Detaching volume: 0858cc5a-0bb2-4121-bf41-c3a1afb67c5c {{(pid=61868) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3225}} [ 882.790971] env[61868]: INFO nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Detaching volume 0858cc5a-0bb2-4121-bf41-c3a1afb67c5c [ 882.824278] env[61868]: DEBUG nova.virt.block_device [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Skipping driver_detach during remote rebuild. {{(pid=61868) _do_detach /opt/stack/nova/nova/virt/block_device.py:461}} [ 882.883578] env[61868]: WARNING nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Failed to detach volume: 0858cc5a-0bb2-4121-bf41-c3a1afb67c5c due to 'NoneType' object has no attribute 'devices' [ 882.883792] env[61868]: DEBUG nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Detaching volume: e317408b-9254-44ca-93d3-2ff9ebdc41fc {{(pid=61868) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3225}} [ 882.883974] env[61868]: INFO nova.compute.manager [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Detaching volume e317408b-9254-44ca-93d3-2ff9ebdc41fc [ 882.917552] env[61868]: DEBUG nova.virt.block_device [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] [instance: 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17] Skipping driver_detach during remote rebuild. 
{{(pid=61868) _do_detach /opt/stack/nova/nova/virt/block_device.py:461}} [ 883.056016] env[61868]: INFO nova.scheduler.client.report [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Deleted allocations for instance 7a2f7e4c-5dde-456d-bf41-c2d0cc507d17 [ 883.056325] env[61868]: DEBUG oslo_concurrency.lockutils [None req-515c5e44-6674-49ac-94a4-b11df76f8ffd tempest-TaggedBootDevicesTest-1269454556 tempest-TaggedBootDevicesTest-1269454556-project-member] Lock "7a2f7e4c-5dde-456d-bf41-c2d0cc507d17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 184.121s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.070028] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 883.126135] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 883.126135] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 883.127589] env[61868]: INFO nova.compute.claims [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.354995] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.389125] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 883.530160] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a4568fd1-bb5a-4bce-b099-3f8cb1b9e938 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Acquiring lock "0e19eccc-446c-48c0-9428-54b71f0b03da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 883.530418] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-a4568fd1-bb5a-4bce-b099-3f8cb1b9e938 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "0e19eccc-446c-48c0-9428-54b71f0b03da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 883.549551] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f159bcc-fb8c-4993-93f7-f8b4100352bf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.557778] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8a1114-f1b7-4211-a870-e0f8b8a8779c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.590766] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d265c229-f3b1-4d12-b91f-b6c5d4da9dd7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.599072] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908636fb-fe07-4716-b4a9-ca015ef55721 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.612714] env[61868]: DEBUG nova.compute.provider_tree [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.621372] env[61868]: DEBUG nova.scheduler.client.report [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 883.639095] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.513s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.639770] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 883.642416] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.272s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 883.642590] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.642739] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 883.643947] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568f4691-6a03-47bc-a18c-78a36bb00f02 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.653234] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ac6093-a4f7-4113-baae-7dce905e65b7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.668720] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536694f5-0969-4147-8f47-c0a713f22594 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.676046] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b2b97f-b8ee-4207-854d-6591a96450f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.680853] env[61868]: DEBUG nova.compute.utils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 883.682916] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Allocating IP information in the background. 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 883.683120] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 883.710570] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181926MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 883.710763] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 883.710954] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 883.713050] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 883.743468] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] No network configured {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1188}} [ 883.743695] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance network_info: |[]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 883.779448] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.779838] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.780124] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.780460] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.780802] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.781081] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.781386] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.781719] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.782157] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.782429] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 883.793373] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 883.797946] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.812800] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.818080] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.818325] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.818479] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.819268] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.819268] env[61868]: DEBUG nova.virt.hardware 
[None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.819268] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.819386] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.819519] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.819687] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.819851] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.820058] env[61868]: DEBUG nova.virt.hardware [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.821325] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fe5b12-27d1-4c23-ace8-717f75f08c58 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.825096] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 13ffc1dc-81be-40bc-94cf-b9ac06d98511 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.832383] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9920b6-d581-44c3-b07f-9c6b2fed0335 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.837258] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 98afcb58-e992-469d-a8fd-94c5eaf69b31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.851735] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.857608] env[61868]: DEBUG oslo.service.loopingcall [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.858355] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bb3714cc-7f34-4a94-b682-aefda6f48ed3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.859794] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 883.860236] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87fb5ae5-7149-4d66-83a5-891f126fe3dc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.873878] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0ef31ff5-f19f-44b0-abb2-b92c76ee9b15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.880107] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.880107] env[61868]: value = "task-40990" [ 883.880107] env[61868]: _type = "Task" [ 883.880107] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.886971] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 02387068-d9d8-4a13-b819-e1b9ef57a5d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.890749] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40990, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.897811] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7958ae5c-6db5-4491-8e32-cfd2cab686d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.909016] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 62c7b41d-1426-408b-a650-4f567171256a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.921049] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad906b04-0b02-444a-b837-acde1c22df43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.932761] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bf46241a-b11c-46e7-b463-c48bc83c8ab2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.943714] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a085b185-6663-460d-b2c8-9acee0a89311 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.956236] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 70ad4510-e8e3-4c1a-a58d-d76822248b9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.968434] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 57439eb9-5e3d-49e7-a634-24cb78d86c99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.981011] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 9a7f9e74-a298-4eee-bf31-153d671ab91a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.992627] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0e19eccc-446c-48c0-9428-54b71f0b03da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 883.992908] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 883.993055] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 884.372568] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12945a17-e0d3-4a6b-9283-1ca4cb563855 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.384340] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca89f94e-77b9-46b0-9cb1-0520d289ab97 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.392687] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40990, 'name': CreateVM_Task, 'duration_secs': 0.29688} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.415879] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 884.416495] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 884.416732] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 884.417463] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0b2e1a-bc73-48d5-bb38-8799905338a4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.422563] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce07f184-8a12-4a06-8d07-df4a117b6758 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.438015] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fdd97c-b28d-4963-912c-a8e76e812ccf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.455775] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Reconfiguring VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 884.456498] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ed1e841-5141-4686-b2ad-425baab7e44d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.476476] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.485665] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 884.485665] env[61868]: value = "task-40991" [ 884.485665] env[61868]: _type = "Task" [ 884.485665] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.487082] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 884.501603] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-40991, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.503223] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 884.503405] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.792s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 885.002829] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-40991, 'name': ReconfigVM_Task, 'duration_secs': 0.118247} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.004394] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Reconfigured VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 885.004875] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.588s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 885.005416] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 885.005824] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 885.006475] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 885.006990] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-210dac8a-6d22-4270-8804-514dd1bb1df0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.017704] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 885.017704] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5245ec77-215c-eb05-aec2-f81b3f953b57" [ 885.017704] env[61868]: _type = "Task" [ 885.017704] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.033756] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 885.034502] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.035093] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 885.500394] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.500656] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.500818] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 885.500938] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 885.520994] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.521159] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.521293] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.521416] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.521537] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.521900] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.522098] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.522235] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.522360] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.522590] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 885.522590] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 885.523172] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.523366] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 886.352167] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 886.352167] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 888.351447] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 888.351447] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 925.753582] env[61868]: WARNING oslo_vmware.rw_handles [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 925.753582] env[61868]: ERROR oslo_vmware.rw_handles [ 925.754383] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Downloaded 
image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 925.755959] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 925.756228] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Copying Virtual Disk [datastore2] vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/7bb6b264-5879-4073-a4ef-1f46b395a758/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 925.756516] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09408f83-44cb-4183-aa9d-8c65f4c3bdb3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.768396] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Waiting for the task: (returnval){ [ 925.768396] env[61868]: value = "task-40993" [ 925.768396] env[61868]: _type = "Task" [ 925.768396] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.777601] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Task: {'id': task-40993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.280126] env[61868]: DEBUG oslo_vmware.exceptions [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 926.280420] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 926.281027] env[61868]: ERROR nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 926.281027] env[61868]: Faults: ['InvalidArgument'] [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Traceback (most recent call last): [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] yield resources [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] self.driver.spawn(context, instance, image_meta, [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] self._fetch_image_if_missing(context, vi) [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 926.281027] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] image_cache(vi, tmp_image_ds_loc) [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] vm_util.copy_virtual_disk( [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] session._wait_for_task(vmdk_copy_task) [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 926.281583] 
env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] return self.wait_for_task(task_ref) [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] return evt.wait() [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] result = hub.switch() [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] return self.greenlet.switch() [ 926.281583] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 926.282057] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] self.f(*self.args, **self.kw) [ 926.282057] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 926.282057] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] raise exceptions.translate_fault(task_info.error) [ 926.282057] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 926.282057] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Faults: ['InvalidArgument'] [ 926.282057] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] [ 926.282057] env[61868]: INFO nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Terminating instance [ 926.284035] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 926.284035] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.284035] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4131022a-4685-41bc-a605-c74478544df5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.288284] env[61868]: 
DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 926.288471] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 926.289261] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d017d63-b9ec-44f8-bfe4-82996fd5e34d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.293227] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.293409] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 926.295293] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6789486e-afaf-4a23-ac43-32035ec386fd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.299374] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 926.299930] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98cf0c87-9086-4f9a-8d57-14abee2158be {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.302554] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for the task: (returnval){ [ 926.302554] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]527556ae-6eaa-b6fc-dd0e-5115c99772c9" [ 926.302554] env[61868]: _type = "Task" [ 926.302554] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.310595] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]527556ae-6eaa-b6fc-dd0e-5115c99772c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.377238] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 926.377582] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 926.377900] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Deleting the datastore file [datastore2] 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.378232] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9df2c32-c62c-47ae-add2-e464f9da093c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.385340] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Waiting for the task: (returnval){ [ 926.385340] env[61868]: value = "task-40995" [ 926.385340] env[61868]: _type = "Task" [ 926.385340] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.394033] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Task: {'id': task-40995, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.815408] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 926.815686] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Creating directory with path [datastore2] vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.815832] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-843ad4f8-3b72-4d3d-8980-6de490162ea8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.828797] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Created directory with path [datastore2] vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.829079] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Fetch image to [datastore2] vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 926.829272] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 926.830072] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde38210-f03f-43f2-b980-894832cbe830 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.838164] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dca8601-a073-4da5-a5cf-2069c734cc06 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.848676] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51003717-780a-419b-ae43-927c6c32b8f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.880720] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1fd9e8-e53a-48c7-9d70-46dafacb45d6 {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.890326] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-050e6ddf-b487-4bf7-b7df-c3835d2559e6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.897332] env[61868]: DEBUG oslo_vmware.api [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Task: {'id': task-40995, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065436} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.897573] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.897877] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 926.898083] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 926.898264] env[61868]: INFO nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Took 0.61 seconds to destroy the instance on the hypervisor. 
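Editor's note: several entries above follow the same task life-cycle — api.py:397 logs "Waiting for the task", api.py:434 logs periodic progress, and api.py:444 logs completion with a duration (CreateVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task all pass through it). Below is a minimal, hypothetical sketch of that polling loop, not oslo.vmware's actual implementation; the task-info dict shape and state names are illustrative assumptions.

```python
# Hypothetical sketch of the wait_for_task/_poll_task pattern in the log.
# NOT oslo.vmware's real code; the dict returned by poll() is an
# illustrative stand-in for vSphere TaskInfo.
import time


def wait_for_task(poll, interval=0.5):
    """Poll `poll()` until the task succeeds or errors out."""
    start = time.monotonic()
    while True:
        info = poll()  # e.g. {'state': 'running', 'progress': 10}
        if info['state'] == 'success':
            duration = time.monotonic() - start
            print(f"completed successfully, duration_secs={duration:.6f}")
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"progress is {info.get('progress', 0)}%.")
        time.sleep(interval)


# Usage with a fake task that finishes on the third poll:
states = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 10},
               {'state': 'success', 'result': 'task-40990'}])
wait_for_task(lambda: next(states), interval=0.01)
```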
[ 926.900415] env[61868]: DEBUG nova.compute.claims [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 926.900608] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 926.900873] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 926.983894] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 927.119818] env[61868]: DEBUG oslo_vmware.rw_handles [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 927.182892] env[61868]: DEBUG oslo_vmware.rw_handles [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 927.183122] env[61868]: DEBUG oslo_vmware.rw_handles [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 927.376534] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e220e4c-523e-4f4c-9fe7-4647a89d357b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.385108] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c66b0a2-40d5-4f66-8a8c-dfbdc3d1f7b3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.416241] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bde609e-a160-488d-9d0b-be20d29ce6c1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.424515] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b098704f-aca7-482d-89ea-324ffbfc1b73 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.438274] env[61868]: DEBUG nova.compute.provider_tree [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.448069] env[61868]: DEBUG nova.scheduler.client.report [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 927.466753] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.566s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 927.467158] env[61868]: ERROR nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 927.467158] env[61868]: Faults: ['InvalidArgument'] [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Traceback (most recent call last): [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] 
self.driver.spawn(context, instance, image_meta, [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] self._fetch_image_if_missing(context, vi) [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] image_cache(vi, tmp_image_ds_loc) [ 927.467158] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] vm_util.copy_virtual_disk( [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] session._wait_for_task(vmdk_copy_task) [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] return self.wait_for_task(task_ref) [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] return evt.wait() [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] result = hub.switch() [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] return self.greenlet.switch() [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 927.467649] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] self.f(*self.args, **self.kw) [ 927.468117] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 927.468117] 
env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] raise exceptions.translate_fault(task_info.error) [ 927.468117] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 927.468117] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Faults: ['InvalidArgument'] [ 927.468117] env[61868]: ERROR nova.compute.manager [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] [ 927.468117] env[61868]: DEBUG nova.compute.utils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 927.469475] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Build of instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 was re-scheduled: A specified parameter was not correct: fileType [ 927.469475] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 927.469857] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 927.470032] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 927.470202] env[61868]: DEBUG nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 927.470366] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 927.789375] env[61868]: DEBUG nova.network.neutron [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.803902] env[61868]: INFO nova.compute.manager [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Took 0.33 seconds to deallocate network for instance. 
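Editor's note: the "Fault InvalidArgument not matched" line earlier (exceptions.py:290) explains why the traceback ends in a generic VimFaultException rather than a specific exception type: no dedicated class is registered for that fault name, so a catch-all carrying the fault list is raised from _poll_task. A hedged sketch of that dispatch, with an illustrative registry and class, not oslo.vmware's real definitions:

```python
# Illustrative sketch of fault-name dispatch implied by the log.
# VimFaultException and FAULT_CLASSES here are stand-ins, not the
# library's actual objects.
class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


FAULT_CLASSES: dict = {}  # specific fault classes would be registered here


def translate_fault(fault_name, message):
    cls = FAULT_CLASSES.get(fault_name)
    if cls is None:  # "Fault InvalidArgument not matched"
        return VimFaultException([fault_name], message)
    return cls(message)


exc = translate_fault('InvalidArgument',
                      'A specified parameter was not correct: fileType')
print(type(exc).__name__, exc.fault_list)  # VimFaultException ['InvalidArgument']
```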
[ 927.918214] env[61868]: INFO nova.scheduler.client.report [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Deleted allocations for instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 [ 927.940075] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4b0c7b35-b6ff-485d-bd95-eb24e2ad10cd tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 336.216s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 927.941242] env[61868]: DEBUG oslo_concurrency.lockutils [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 139.086s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 927.941500] env[61868]: DEBUG oslo_concurrency.lockutils [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Acquiring lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 927.941675] env[61868]: DEBUG oslo_concurrency.lockutils [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 927.941853] env[61868]: DEBUG oslo_concurrency.lockutils [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 927.943842] env[61868]: INFO nova.compute.manager [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Terminating instance [ 927.945580] env[61868]: DEBUG nova.compute.manager [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 927.945970] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 927.946234] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5e81914-982c-4fb7-8f4d-14111c9bce8f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.956540] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe3a11b-3ffb-4d42-a178-14b293d52976 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.967777] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 927.991690] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69 could not be found. [ 927.991927] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 927.992139] env[61868]: INFO nova.compute.manager [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Took 0.05 seconds to destroy the instance on the hypervisor. [ 927.992386] env[61868]: DEBUG oslo.service.loopingcall [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.992612] env[61868]: DEBUG nova.compute.manager [-] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 927.992709] env[61868]: DEBUG nova.network.neutron [-] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 928.023780] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 928.024042] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 928.025539] env[61868]: INFO nova.compute.claims [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.028600] env[61868]: DEBUG nova.network.neutron [-] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.036283] env[61868]: INFO nova.compute.manager [-] [instance: 3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69] Took 0.04 seconds to deallocate network for instance. 
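The 'Acquiring lock ... / Lock ... acquired ... waited / "released" ... held' triplets that surround the claim above (and recur throughout this log) are emitted by oslo.concurrency's lock wrapper, not by nova itself. A minimal sketch of that pattern follows, assuming only oslo.concurrency is installed; the lock name "compute_resources" mirrors the records above, and the function body is a placeholder, not nova's actual ResourceTracker.instance_claim() logic.

    # Minimal sketch (not from the log) of the oslo.concurrency pattern.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # While this body runs, lockutils' inner wrapper logs how long the
        # caller waited for the lock and, on exit, how long it was held --
        # e.g. the 'held 0.510s' line emitted for the claim below.
        pass

Because every resource claim and termination serialises on the same named lock, the waited/held durations in these records are a direct measure of contention on the compute host's resource tracker.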
[ 928.166437] env[61868]: DEBUG oslo_concurrency.lockutils [None req-37ee3211-1998-4ab6-bbc6-3ec02c5d794b tempest-ServersTestJSON-63797321 tempest-ServersTestJSON-63797321-project-member] Lock "3fcfbbb6-17cb-4b8e-81ef-7d4dbc57dd69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.225s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 928.440623] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad25bfd6-b31e-43ca-b928-72479bf4a1c2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.449468] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2473efd-9d1e-4c68-9cb9-da941c7cdafd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.484432] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4988cdbe-3d45-4adc-9e12-4547415fe0aa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.492624] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd61873d-14a1-4a93-a5aa-9ac21b7b379b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.507777] env[61868]: DEBUG nova.compute.provider_tree [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.516862] env[61868]: DEBUG nova.scheduler.client.report [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 928.534287] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.510s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 928.534775] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 928.569001] env[61868]: DEBUG nova.compute.utils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 928.570538] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 928.570737] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 928.584484] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 928.626013] env[61868]: DEBUG nova.policy [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18897e7d91124330b7f1f4ebca1f0d76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '575393450a20405e9d2b1bec5db93c73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 928.661796] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 928.683720] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 928.683974] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 928.684242] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.684370] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 928.684515] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.684662] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 928.684867] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 928.685026] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 928.685187] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 
tempest-ServersTestMultiNic-969860196-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 928.685343] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 928.685508] env[61868]: DEBUG nova.virt.hardware [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 928.686428] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db3b599-b936-4196-ae70-271b20740d6f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.704166] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8122ba3f-e0cf-4138-ac58-70982ff655be {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.980633] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Successfully created port: d70380af-d6ed-4a3c-b595-0d9322f18ee4 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.439027] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Successfully created port: e6b341fc-a993-4108-ae2b-9eff9a9a426c {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 930.518395] env[61868]: DEBUG nova.compute.manager [req-f780088e-afb0-4ba8-8807-3ef0e035383d req-d31bb6cd-eefa-47ac-bd8e-5612da97d238 service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Received event network-vif-plugged-d70380af-d6ed-4a3c-b595-0d9322f18ee4 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 930.518722] env[61868]: DEBUG oslo_concurrency.lockutils [req-f780088e-afb0-4ba8-8807-3ef0e035383d req-d31bb6cd-eefa-47ac-bd8e-5612da97d238 service nova] Acquiring lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 930.518826] env[61868]: DEBUG oslo_concurrency.lockutils [req-f780088e-afb0-4ba8-8807-3ef0e035383d req-d31bb6cd-eefa-47ac-bd8e-5612da97d238 service nova] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 930.518993] env[61868]: DEBUG oslo_concurrency.lockutils [req-f780088e-afb0-4ba8-8807-3ef0e035383d req-d31bb6cd-eefa-47ac-bd8e-5612da97d238 service nova] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 930.519158] env[61868]: DEBUG nova.compute.manager [req-f780088e-afb0-4ba8-8807-3ef0e035383d req-d31bb6cd-eefa-47ac-bd8e-5612da97d238 service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] No waiting events found dispatching network-vif-plugged-d70380af-d6ed-4a3c-b595-0d9322f18ee4 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 930.519320] env[61868]: WARNING nova.compute.manager [req-f780088e-afb0-4ba8-8807-3ef0e035383d req-d31bb6cd-eefa-47ac-bd8e-5612da97d238 service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Received unexpected event network-vif-plugged-d70380af-d6ed-4a3c-b595-0d9322f18ee4 for instance with vm_state building and task_state spawning. [ 930.693259] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Successfully updated port: d70380af-d6ed-4a3c-b595-0d9322f18ee4 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.395000] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Successfully updated port: e6b341fc-a993-4108-ae2b-9eff9a9a426c {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.406520] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 931.406648] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquired lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 931.406779] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 931.472824] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 932.053507] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Updating instance_info_cache with network_info: [{"id": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "address": "fa:16:3e:75:69:68", "network": {"id": "4c64503a-b2eb-475b-ae49-9ddca16f6a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-900901313", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70380af-d6", "ovs_interfaceid": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "address": "fa:16:3e:f4:1e:72", "network": {"id": "12fbe10a-c899-49d2-88fd-166df5d13b34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-642023918", "subnets": [{"cidr": "10.0.0.16/28", "dns": [], "gateway": {"address": "10.0.0.17", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.18"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b341fc-a9", "ovs_interfaceid": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.069816] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Releasing lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 932.070204] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Instance network_info: |[{"id": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "address": "fa:16:3e:75:69:68", "network": {"id": "4c64503a-b2eb-475b-ae49-9ddca16f6a3c", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-900901313", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70380af-d6", "ovs_interfaceid": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "address": "fa:16:3e:f4:1e:72", "network": {"id": "12fbe10a-c899-49d2-88fd-166df5d13b34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-642023918", "subnets": [{"cidr": "10.0.0.16/28", "dns": [], "gateway": {"address": "10.0.0.17", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.18"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b341fc-a9", "ovs_interfaceid": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 932.070989] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:69:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd70692eb-97b3-417c-a4ca-1ee888246ad9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd70380af-d6ed-4a3c-b595-0d9322f18ee4', 'vif_model': 'e1000'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:1e:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6b341fc-a993-4108-ae2b-9eff9a9a426c', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.084556] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Creating folder: Project (575393450a20405e9d2b1bec5db93c73). Parent ref: group-v18181. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 932.085255] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8fbc5c8-4222-40e3-b11a-5566af6871e4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.099591] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Created folder: Project (575393450a20405e9d2b1bec5db93c73) in parent group-v18181. [ 932.099591] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Creating folder: Instances. Parent ref: group-v18237. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 932.099591] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-833bb644-e004-4b04-872d-f0b4a8b6747f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.109853] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Created folder: Instances in parent group-v18237. [ 932.110274] env[61868]: DEBUG oslo.service.loopingcall [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.110584] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 932.110924] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-410600f9-82c9-4194-9504-30107428af4f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.143308] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.143308] env[61868]: value = "task-40998" [ 932.143308] env[61868]: _type = "Task" [ 932.143308] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.151884] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40998, 'name': CreateVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.653814] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-40998, 'name': CreateVM_Task, 'duration_secs': 0.38887} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.653988] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 932.654833] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 932.655081] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 932.657950] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bbc991-e9c2-4143-b531-24c670c85ded {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.692191] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Reconfiguring VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 932.692549] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3754429-f282-45e4-b746-ad84a8a60779 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.706067] env[61868]: DEBUG nova.compute.manager [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Received event network-changed-d70380af-d6ed-4a3c-b595-0d9322f18ee4 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 932.706261] env[61868]: DEBUG nova.compute.manager [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Refreshing instance network info cache due to event network-changed-d70380af-d6ed-4a3c-b595-0d9322f18ee4. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 932.706473] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Acquiring lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 932.706608] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Acquired lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 932.706759] env[61868]: DEBUG nova.network.neutron [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Refreshing network info cache for port d70380af-d6ed-4a3c-b595-0d9322f18ee4 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 932.714381] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Waiting for the task: (returnval){ [ 932.714381] env[61868]: value = "task-40999" [ 932.714381] env[61868]: _type = "Task" [ 932.714381] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.727972] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Task: {'id': task-40999, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.982700] env[61868]: DEBUG nova.network.neutron [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Updated VIF entry in instance network info cache for port d70380af-d6ed-4a3c-b595-0d9322f18ee4. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 932.983049] env[61868]: DEBUG nova.network.neutron [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Updating instance_info_cache with network_info: [{"id": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "address": "fa:16:3e:75:69:68", "network": {"id": "4c64503a-b2eb-475b-ae49-9ddca16f6a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-900901313", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70380af-d6", "ovs_interfaceid": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "address": "fa:16:3e:f4:1e:72", "network": {"id": "12fbe10a-c899-49d2-88fd-166df5d13b34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-642023918", "subnets": [{"cidr": "10.0.0.16/28", "dns": [], "gateway": {"address": "10.0.0.17", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.18"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b341fc-a9", "ovs_interfaceid": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.992704] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Releasing lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 932.992938] env[61868]: DEBUG nova.compute.manager [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Received event network-vif-plugged-e6b341fc-a993-4108-ae2b-9eff9a9a426c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 932.993147] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Acquiring lock 
"3394162c-605f-40a1-9dc8-dc5cba6a083f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 932.993340] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 932.993492] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 932.993647] env[61868]: DEBUG nova.compute.manager [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] No waiting events found dispatching network-vif-plugged-e6b341fc-a993-4108-ae2b-9eff9a9a426c {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 932.993802] env[61868]: WARNING nova.compute.manager [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Received unexpected event network-vif-plugged-e6b341fc-a993-4108-ae2b-9eff9a9a426c for instance with vm_state building and task_state spawning. [ 932.993955] env[61868]: DEBUG nova.compute.manager [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Received event network-changed-e6b341fc-a993-4108-ae2b-9eff9a9a426c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 932.994104] env[61868]: DEBUG nova.compute.manager [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Refreshing instance network info cache due to event network-changed-e6b341fc-a993-4108-ae2b-9eff9a9a426c. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 932.994275] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Acquiring lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 932.994405] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Acquired lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 932.994552] env[61868]: DEBUG nova.network.neutron [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Refreshing network info cache for port e6b341fc-a993-4108-ae2b-9eff9a9a426c {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 933.226750] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Task: {'id': task-40999, 'name': ReconfigVM_Task, 'duration_secs': 0.107113} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.227431] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Reconfigured VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 933.227772] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.573s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 933.228418] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 933.228701] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 933.229122] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 933.230105] env[61868]: DEBUG 
oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5f4507e-8789-4edd-9c08-e1fb0f2b6d30 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.235477] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Waiting for the task: (returnval){ [ 933.235477] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5216d4a0-0e91-143a-83ea-109ce7ada7d7" [ 933.235477] env[61868]: _type = "Task" [ 933.235477] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.250956] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5216d4a0-0e91-143a-83ea-109ce7ada7d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.390762] env[61868]: DEBUG nova.network.neutron [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Updated VIF entry in instance network info cache for port e6b341fc-a993-4108-ae2b-9eff9a9a426c. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 933.391762] env[61868]: DEBUG nova.network.neutron [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Updating instance_info_cache with network_info: [{"id": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "address": "fa:16:3e:75:69:68", "network": {"id": "4c64503a-b2eb-475b-ae49-9ddca16f6a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-900901313", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70380af-d6", "ovs_interfaceid": "d70380af-d6ed-4a3c-b595-0d9322f18ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "address": "fa:16:3e:f4:1e:72", "network": {"id": "12fbe10a-c899-49d2-88fd-166df5d13b34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-642023918", "subnets": [{"cidr": "10.0.0.16/28", "dns": [], "gateway": {"address": "10.0.0.17", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.18"}}], "meta": {"injected": false, "tenant_id": "575393450a20405e9d2b1bec5db93c73", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b341fc-a9", "ovs_interfaceid": "e6b341fc-a993-4108-ae2b-9eff9a9a426c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.403777] env[61868]: DEBUG oslo_concurrency.lockutils [req-ae4b9466-d476-4baa-8f2d-7468698b8155 req-813b86bc-9e0f-415e-9419-b830b5151efe service nova] Releasing lock "refresh_cache-3394162c-605f-40a1-9dc8-dc5cba6a083f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 933.746519] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 933.746701] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.746899] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 934.135981] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 938.427364] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 938.427775] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 943.352095] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 945.350985] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 945.351334] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 945.361641] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 945.361867] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 945.362037] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 945.362190] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 945.363324] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e397487b-1e26-4855-b522-50456f7aafc8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.372638] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80d4f5d-9690-47af-9c19-f0308b7d87f6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.387429] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b069c099-3cfa-4a18-aa09-5a9ed39920ff {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.394351] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05c638d-7603-4a77-bccd-f26574515eee {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.424024] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181933MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 945.424188] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 945.424387] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 945.491796] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.491975] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492116] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492237] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492353] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492469] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492582] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492694] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492807] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.492916] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 945.503963] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.536575] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 13ffc1dc-81be-40bc-94cf-b9ac06d98511 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.547107] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 98afcb58-e992-469d-a8fd-94c5eaf69b31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.557073] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bb3714cc-7f34-4a94-b682-aefda6f48ed3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.566544] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0ef31ff5-f19f-44b0-abb2-b92c76ee9b15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.576284] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 02387068-d9d8-4a13-b819-e1b9ef57a5d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.586854] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7958ae5c-6db5-4491-8e32-cfd2cab686d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.597314] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 62c7b41d-1426-408b-a650-4f567171256a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.607258] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad906b04-0b02-444a-b837-acde1c22df43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.617104] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bf46241a-b11c-46e7-b463-c48bc83c8ab2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.627252] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a085b185-6663-460d-b2c8-9acee0a89311 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.637080] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 70ad4510-e8e3-4c1a-a58d-d76822248b9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.647409] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 57439eb9-5e3d-49e7-a634-24cb78d86c99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.656942] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 9a7f9e74-a298-4eee-bf31-153d671ab91a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.667950] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0e19eccc-446c-48c0-9428-54b71f0b03da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.677331] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 945.677568] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 945.677711] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 945.986691] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bd29c2-2765-4875-ac4f-e577dc2d883a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.994440] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0b980f-c37c-43cf-8bda-d19f30d11b8a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 946.026477] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1665f9-f583-40b8-8c00-09d063d9f355 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 946.034700] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fdcdd7-0ffd-42a8-a954-b3ce79f1fa8b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 946.048764] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 946.057731] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 946.075250] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 946.075451] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.651s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 947.071819] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 947.072155] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 947.350684] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 947.350920] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 947.351027] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}}
[ 947.375528] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.375716] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.375825] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.375955] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.376089] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.376211] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.376333] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.376462] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.376583] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.376706] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}}
[ 947.376822] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}}
[ 947.377362] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 947.377762] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 948.351112] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 948.351387] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}}
[ 976.136858] env[61868]: WARNING oslo_vmware.rw_handles [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles     response.begin()
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 976.136858] env[61868]: ERROR oslo_vmware.rw_handles
[ 976.136858] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 976.138871] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 976.139121] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Copying Virtual Disk [datastore2] vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/febec5ea-eb5f-4348-9fd5-d3b28f4055e5/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 976.139412] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a125f8d-cd0e-4af6-a053-7dbcbae5e6d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 976.148030] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for the task: (returnval){
[ 976.148030] env[61868]: value = "task-41000"
[ 976.148030] env[61868]: _type = "Task"
[ 976.148030] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 976.157204] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Task: {'id': task-41000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 976.658369] env[61868]: DEBUG oslo_vmware.exceptions [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 976.658663] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 976.659254] env[61868]: ERROR nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 976.659254] env[61868]: Faults: ['InvalidArgument']
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Traceback (most recent call last):
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     yield resources
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     self.driver.spawn(context, instance, image_meta,
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     self._fetch_image_if_missing(context, vi)
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     image_cache(vi, tmp_image_ds_loc)
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     vm_util.copy_virtual_disk(
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     session._wait_for_task(vmdk_copy_task)
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     return self.wait_for_task(task_ref)
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     return evt.wait()
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     result = hub.switch()
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     return self.greenlet.switch()
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     self.f(*self.args, **self.kw)
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]     raise exceptions.translate_fault(task_info.error)
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Faults: ['InvalidArgument']
[ 976.659254] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04]
[ 976.660129] env[61868]: INFO nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Terminating instance
[ 976.661297] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 976.661499] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 976.661755] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-541a8684-8e35-4d73-9805-19694246f464 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 976.664274] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 976.664274] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquired lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 976.664409] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}}
[ 976.671412] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 976.671593] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 976.672835] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26166377-ba7e-4aef-8737-fb60d97c471c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 976.680703] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Waiting for the task: (returnval){
[ 976.680703] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52665904-41e9-560c-5d6d-3999923e0607"
[ 976.680703] env[61868]: _type = "Task"
[ 976.680703] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 976.688639] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52665904-41e9-560c-5d6d-3999923e0607, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 976.696611] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}}
[ 976.720997] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 976.730335] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Releasing lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 976.730893] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 976.731086] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 976.732163] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ec0dfd-ee4a-4bed-a666-2aabfc3ed9ec {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 976.740331] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 976.740561] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03037ce4-7c0d-4cc7-8b0f-3dcb7bcebe71 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 976.773686] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 976.773851] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 976.774041] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Deleting the datastore file [datastore2] 882b26da-9f56-4bec-b10b-7b46b4c6ae04 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 976.774363] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b0495fd-ea48-46f6-b2ab-144936ad24c4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 976.781966] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for the task: (returnval){
[ 976.781966] env[61868]: value = "task-41002"
[ 976.781966] env[61868]: _type = "Task"
[ 976.781966] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 976.790948] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Task: {'id': task-41002, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 977.193765] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 977.194145] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Creating directory with path [datastore2] vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 977.194247] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f6bd8b5-af31-4526-9373-ec4d9d00b4d9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 977.206459] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Created directory with path [datastore2] vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 977.206715] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Fetch image to [datastore2] vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 977.206969] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 977.207723] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d280f2-fc52-400c-96f4-0f13227e9218 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 977.214740] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e87cc7-2446-46db-b4be-ca086a834fb2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 977.223751] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d28eb57-9c48-4a9c-9be7-4588973bb9d4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 977.255301] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd00d1c-7433-4e71-a67f-ce5e1e796167 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 977.263156] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ced4d6bf-6fc8-4c6a-99b9-e8a2fec45919 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 977.290967] env[61868]: DEBUG oslo_vmware.api [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Task: {'id': task-41002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040645} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 977.291179] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 977.291363] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 977.291536] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 977.291704] env[61868]: INFO nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Took 0.56 seconds to destroy the instance on the hypervisor.
[ 977.291939] env[61868]: DEBUG oslo.service.loopingcall [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 977.292145] env[61868]: DEBUG nova.compute.manager [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 977.292242] env[61868]: DEBUG nova.network.neutron [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 977.315533] env[61868]: DEBUG nova.network.neutron [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}}
[ 977.323927] env[61868]: DEBUG nova.network.neutron [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 977.332719] env[61868]: INFO nova.compute.manager [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Took 0.04 seconds to deallocate network for instance.
[ 977.334798] env[61868]: DEBUG nova.compute.claims [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 977.334998] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 977.335280] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 977.353624] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 977.411332] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 977.472259] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 977.472259] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 977.760328] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70dde520-2ab6-4517-beea-d73fbd929d0a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.768451] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1341a4d1-b35e-4a80-bcc5-041c0fd81694 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.804819] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1348fa5-49e4-4a48-bb51-8a022a67126a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.812754] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aad4503-4842-4b3b-86aa-63327fb4e60d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.826273] env[61868]: DEBUG nova.compute.provider_tree [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.834917] env[61868]: DEBUG nova.scheduler.client.report [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 977.852365] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.517s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 977.852877] env[61868]: ERROR nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 977.852877] env[61868]: Faults: ['InvalidArgument'] [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Traceback (most recent call last): [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 977.852877] env[61868]: ERROR 
nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] self.driver.spawn(context, instance, image_meta, [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] self._fetch_image_if_missing(context, vi) [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] image_cache(vi, tmp_image_ds_loc) [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] vm_util.copy_virtual_disk( [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] session._wait_for_task(vmdk_copy_task) [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] return self.wait_for_task(task_ref) [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] return evt.wait() [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] result = hub.switch() [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] return self.greenlet.switch() [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] self.f(*self.args, **self.kw) [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] raise exceptions.translate_fault(task_info.error) [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Faults: ['InvalidArgument'] [ 977.852877] env[61868]: ERROR nova.compute.manager [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] [ 977.853719] env[61868]: DEBUG nova.compute.utils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 977.854990] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Build of instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 was re-scheduled: A specified parameter was not correct: fileType [ 977.854990] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 977.855348] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 977.855571] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 977.855719] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquired lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 977.855878] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 977.882652] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 977.908684] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.918001] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Releasing lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 977.918238] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 977.918404] env[61868]: DEBUG nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 977.918572] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 977.938267] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 977.945297] env[61868]: DEBUG nova.network.neutron [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.954224] env[61868]: INFO nova.compute.manager [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Took 0.04 seconds to deallocate network for instance. 
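Annotation: the traceback that ends above shows the failure path in full: a CopyVirtualDisk_Task fails vCenter-side with InvalidArgument on fileType, and oslo.vmware's task poller re-raises it in the greenthread blocked in wait_for_task(), after which Nova aborts the claim, re-schedules, and deallocates networking. A minimal sketch of that poll-and-translate shape follows; this is illustrative only, not the oslo.vmware source (the real code runs the poller under eventlet via a looping call and uses exceptions.translate_fault(), and the local exception class here just mirrors the real VimFaultException for self-containment):

```python
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (illustrative)."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def poll_task(get_task_info, interval=0.5):
    """Poll a vCenter task dict until it succeeds or errors (sketch only).

    get_task_info is a hypothetical callable returning e.g.
    {'state': 'error', 'faults': ['InvalidArgument'], 'message': '...'}.
    """
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # Mirrors: raise exceptions.translate_fault(task_info.error)
            raise VimFaultException(info.get('faults', []), info['message'])
        time.sleep(interval)  # the real poller yields cooperatively instead
```

Under this reading, the "Faults: ['InvalidArgument']" lines in the traceback are the fault_list carried on the translated exception, which is why the same text reappears at every log level that re-raises it.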
[ 978.050635] env[61868]: INFO nova.scheduler.client.report [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Deleted allocations for instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 [ 978.069796] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9cde7c8c-ea9a-49f1-a5d8-1caaf31f4740 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 379.481s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 978.070886] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 179.545s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 978.071106] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 978.071324] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 978.071490] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 978.073869] env[61868]: INFO nova.compute.manager [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Terminating instance [ 978.075627] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquiring lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 978.075797] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Acquired lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 978.075983] env[61868]: DEBUG nova.network.neutron [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 978.085477] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 978.113872] env[61868]: DEBUG nova.network.neutron [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 978.144085] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 978.144366] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 978.146079] env[61868]: INFO nova.compute.claims [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.149944] env[61868]: DEBUG nova.network.neutron [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.159402] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Releasing lock "refresh_cache-882b26da-9f56-4bec-b10b-7b46b4c6ae04" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 978.159792] env[61868]: DEBUG nova.compute.manager [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 978.159978] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 978.160538] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67f2f776-d1f9-4032-a201-a192e4ea7f38 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.171391] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fe2d05-8769-4117-90dd-0904676b4feb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.200107] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 882b26da-9f56-4bec-b10b-7b46b4c6ae04 could not be found. [ 978.200443] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 978.200483] env[61868]: INFO nova.compute.manager [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Took 0.04 seconds to destroy the instance on the hypervisor. [ 978.200745] env[61868]: DEBUG oslo.service.loopingcall [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.203424] env[61868]: DEBUG nova.compute.manager [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 978.203518] env[61868]: DEBUG nova.network.neutron [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 978.224149] env[61868]: DEBUG nova.network.neutron [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 978.232139] env[61868]: DEBUG nova.network.neutron [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.243870] env[61868]: INFO nova.compute.manager [-] [instance: 882b26da-9f56-4bec-b10b-7b46b4c6ae04] Took 0.04 seconds to deallocate network for instance. 
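Annotation: both the re-schedule cleanup above and this terminate path do their network-info-cache work under the per-instance "refresh_cache-<uuid>" lock, and the destroy is idempotent (the InstanceNotFound from FindAllByUuid is logged as a WARNING and treated as "already destroyed"). A hedged sketch of the lock pattern, using the real oslo_concurrency lockutils.lock() context manager; the helper and its callback parameters are hypothetical, not Nova code:

```python
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, build_nw_info, save_cache):
    """Serialize per-instance cache refreshes, as the log's lock lines show.

    build_nw_info and save_cache are assumed callables standing in for the
    Neutron query and the instance_info_cache write.
    """
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        nw_info = build_nw_info(instance_uuid)  # may be [] when no ports exist
        save_cache(instance_uuid, nw_info)
        return nw_info
```

The acquire/waited/held timings in the surrounding entries are what lockutils emits around exactly this kind of critical section, which makes them a convenient way to spot lock contention (e.g. the 179.545s wait on the instance lock above).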
[ 978.342707] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d45206c-b362-431c-8ebb-b5d72678a346 tempest-ServerDiagnosticsTest-2048545803 tempest-ServerDiagnosticsTest-2048545803-project-member] Lock "882b26da-9f56-4bec-b10b-7b46b4c6ae04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.272s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 978.540483] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05b184f-6e28-44e3-8cc5-4df3cf4a0a92 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.549203] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1260b3-83d7-41bc-aacf-9fdbe6df7dc3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.581993] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060b5f14-014e-4b94-81e7-5b7dfd05aa16 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.590021] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69d4555-3a90-45bb-93cb-0e95c44ca1db {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.603932] env[61868]: DEBUG nova.compute.provider_tree [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.613351] env[61868]: DEBUG nova.scheduler.client.report [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 978.630718] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.486s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 978.631303] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 978.668699] env[61868]: DEBUG nova.compute.utils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 978.670575] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 978.670885] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 978.682525] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 978.716307] env[61868]: DEBUG nova.policy [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74f15b527146bb9bc726e54d220a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d5fac165e449d49cd6e9d9c7e9d116', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 978.754601] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 978.778493] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 978.778754] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 978.778897] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.779075] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 978.779217] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.779363] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 978.779566] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 978.779725] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 978.779890] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Got 1 possible 
topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 978.780392] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 978.780392] env[61868]: DEBUG nova.virt.hardware [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 978.781249] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b465aea2-7173-4c8d-87be-d81855344498 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.789444] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4c344b-4698-4f63-a4c1-3c05b5b7be8b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.190909] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Successfully created port: 9f030bea-e7a1-48fe-901d-cac6c72e7e40 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.807249] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Successfully updated port: 9f030bea-e7a1-48fe-901d-cac6c72e7e40 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 979.822073] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "refresh_cache-26f77431-9a5d-444d-b345-10108c34b59b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 979.822073] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "refresh_cache-26f77431-9a5d-444d-b345-10108c34b59b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 979.822073] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 979.861812] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 980.108709] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Updating instance_info_cache with network_info: [{"id": "9f030bea-e7a1-48fe-901d-cac6c72e7e40", "address": "fa:16:3e:8a:1a:05", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f030bea-e7", "ovs_interfaceid": "9f030bea-e7a1-48fe-901d-cac6c72e7e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.112260] env[61868]: DEBUG nova.compute.manager [req-a17374aa-0981-4e24-aa98-383c20857742 req-dd4a373b-e67d-4099-8826-d76f4fa7983c service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Received event network-vif-plugged-9f030bea-e7a1-48fe-901d-cac6c72e7e40 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 980.112260] env[61868]: DEBUG oslo_concurrency.lockutils [req-a17374aa-0981-4e24-aa98-383c20857742 req-dd4a373b-e67d-4099-8826-d76f4fa7983c service nova] Acquiring lock "26f77431-9a5d-444d-b345-10108c34b59b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 980.112260] env[61868]: DEBUG oslo_concurrency.lockutils [req-a17374aa-0981-4e24-aa98-383c20857742 req-dd4a373b-e67d-4099-8826-d76f4fa7983c service nova] Lock "26f77431-9a5d-444d-b345-10108c34b59b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 980.112260] env[61868]: DEBUG oslo_concurrency.lockutils [req-a17374aa-0981-4e24-aa98-383c20857742 req-dd4a373b-e67d-4099-8826-d76f4fa7983c service nova] Lock "26f77431-9a5d-444d-b345-10108c34b59b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 980.112393] env[61868]: DEBUG nova.compute.manager [req-a17374aa-0981-4e24-aa98-383c20857742 req-dd4a373b-e67d-4099-8826-d76f4fa7983c service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] No waiting events found dispatching network-vif-plugged-9f030bea-e7a1-48fe-901d-cac6c72e7e40 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.113743] 
env[61868]: WARNING nova.compute.manager [req-a17374aa-0981-4e24-aa98-383c20857742 req-dd4a373b-e67d-4099-8826-d76f4fa7983c service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Received unexpected event network-vif-plugged-9f030bea-e7a1-48fe-901d-cac6c72e7e40 for instance with vm_state building and task_state spawning. [ 980.123712] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "refresh_cache-26f77431-9a5d-444d-b345-10108c34b59b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 980.124176] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Instance network_info: |[{"id": "9f030bea-e7a1-48fe-901d-cac6c72e7e40", "address": "fa:16:3e:8a:1a:05", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f030bea-e7", "ovs_interfaceid": "9f030bea-e7a1-48fe-901d-cac6c72e7e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 980.124455] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:1a:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f030bea-e7a1-48fe-901d-cac6c72e7e40', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.132557] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating folder: Project (01d5fac165e449d49cd6e9d9c7e9d116). Parent ref: group-v18181. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 980.133502] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a63c895-6208-4dc0-acd5-8ad77846c4ee {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.146213] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created folder: Project (01d5fac165e449d49cd6e9d9c7e9d116) in parent group-v18181. [ 980.146575] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating folder: Instances. Parent ref: group-v18240. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 980.146715] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-532beb49-0816-484a-ab78-0d81ec324c5a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.161548] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created folder: Instances in parent group-v18240. [ 980.161837] env[61868]: DEBUG oslo.service.loopingcall [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.162052] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 980.162718] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e4ea908-c672-4d35-919c-237a9174b248 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.199386] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.199386] env[61868]: value = "task-41005" [ 980.199386] env[61868]: _type = "Task" [ 980.199386] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.207848] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41005, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.709334] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41005, 'name': CreateVM_Task, 'duration_secs': 0.319785} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.709531] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 980.710168] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 980.710405] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 980.713280] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af393b9-9876-4963-9833-39ca63d04053 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.746504] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Reconfiguring VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 980.746853] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-863ec687-69df-4f66-90ee-e2a4bdcf30ff {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.764869] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 980.764869] env[61868]: value = "task-41006" [ 980.764869] env[61868]: _type = "Task" [ 980.764869] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.773360] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41006, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.274501] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41006, 'name': ReconfigVM_Task, 'duration_secs': 0.115453} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.275257] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Reconfigured VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 981.275600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 981.275964] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 981.276270] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 981.276702] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 981.277096] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e0c2710-62c3-4087-ae06-1fedaea120c7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.283537] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 981.283537] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5271ebaf-e65c-a3c5-da94-bd45fdb14189" [ 981.283537] env[61868]: _type = "Task" [ 981.283537] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.293902] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5271ebaf-e65c-a3c5-da94-bd45fdb14189, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.794189] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 981.794457] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.794667] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 982.204226] env[61868]: DEBUG nova.compute.manager [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Received event network-changed-9f030bea-e7a1-48fe-901d-cac6c72e7e40 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 982.204415] env[61868]: DEBUG nova.compute.manager [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Refreshing instance network info cache due to event network-changed-9f030bea-e7a1-48fe-901d-cac6c72e7e40. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 982.204623] env[61868]: DEBUG oslo_concurrency.lockutils [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] Acquiring lock "refresh_cache-26f77431-9a5d-444d-b345-10108c34b59b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 982.204760] env[61868]: DEBUG oslo_concurrency.lockutils [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] Acquired lock "refresh_cache-26f77431-9a5d-444d-b345-10108c34b59b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 982.204914] env[61868]: DEBUG nova.network.neutron [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Refreshing network info cache for port 9f030bea-e7a1-48fe-901d-cac6c72e7e40 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 982.537483] env[61868]: DEBUG nova.network.neutron [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Updated VIF entry in instance network info cache for port 9f030bea-e7a1-48fe-901d-cac6c72e7e40. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 982.537832] env[61868]: DEBUG nova.network.neutron [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Updating instance_info_cache with network_info: [{"id": "9f030bea-e7a1-48fe-901d-cac6c72e7e40", "address": "fa:16:3e:8a:1a:05", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f030bea-e7", "ovs_interfaceid": "9f030bea-e7a1-48fe-901d-cac6c72e7e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.549132] env[61868]: DEBUG oslo_concurrency.lockutils [req-89062633-fc10-42b3-93c9-227da8f1c7a1 req-bd6b5c7d-494c-4456-a64a-41c65e728310 service nova] Releasing lock "refresh_cache-26f77431-9a5d-444d-b345-10108c34b59b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 983.929387] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 983.929747] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 999.624731] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 999.624731] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock 
"b2dbce45-4bfa-4356-b608-e44e5a15c081" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1000.349337] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1002.554814] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1002.555143] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1003.350797] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.351905] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1007.351270] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1007.351520] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1007.365678] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1007.365965] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1007.366041] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1007.366199] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1007.367327] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169d7c80-4338-48fe-98a5-4241e0a92547 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.376994] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f32d2c-8067-4a57-8c91-217f2e68cd1a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.396859] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390d1884-1e9e-4a07-b18a-dbef1b39b720 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.405573] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef3bc51-b2cb-4c32-af20-c9c7f04907fe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.440248] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181953MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1007.440521] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1007.440981] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1007.522981] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.523243] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.523285] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.523371] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.523571] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.523722] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.523842] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.523978] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.524136] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.524293] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1007.539555] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 13ffc1dc-81be-40bc-94cf-b9ac06d98511 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.554256] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 98afcb58-e992-469d-a8fd-94c5eaf69b31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.565549] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bb3714cc-7f34-4a94-b682-aefda6f48ed3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.577597] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0ef31ff5-f19f-44b0-abb2-b92c76ee9b15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.592537] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 02387068-d9d8-4a13-b819-e1b9ef57a5d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.607252] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7958ae5c-6db5-4491-8e32-cfd2cab686d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.641466] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 62c7b41d-1426-408b-a650-4f567171256a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.657158] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad906b04-0b02-444a-b837-acde1c22df43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.671307] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bf46241a-b11c-46e7-b463-c48bc83c8ab2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.683919] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a085b185-6663-460d-b2c8-9acee0a89311 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.697856] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 70ad4510-e8e3-4c1a-a58d-d76822248b9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.708876] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 57439eb9-5e3d-49e7-a634-24cb78d86c99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.721038] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 9a7f9e74-a298-4eee-bf31-153d671ab91a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.732981] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0e19eccc-446c-48c0-9428-54b71f0b03da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.747273] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.762510] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.778425] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.789971] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1007.790241] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1007.790383] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1008.275926] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f6eb5e-6d45-478b-a2af-6951628536d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.284386] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb464a6-d3c9-439a-b833-4c425f11ed54 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.317416] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ff5bab-48a9-473e-9d39-0be06e408005 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.325580] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615d55ff-2cfa-4d07-bbb5-ee6cd1633941 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.340094] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.349640] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1008.367058] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1008.367358] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.926s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1009.362448] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1009.362691] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1009.362845] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1009.362963] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1009.387850] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388211] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388211] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388271] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388381] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388498] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388612] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388762] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.388892] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.389012] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1009.389130] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1009.389658] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1009.389825] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1010.351284] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1010.351457] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1011.835878] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5793acf2-2a6e-4aa6-865a-b29e18e7c107 tempest-ServersAaction247Test-821762318 tempest-ServersAaction247Test-821762318-project-member] Acquiring lock "6ff71cf5-4473-4e25-b6f0-b3da104ed9ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1011.839002] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5793acf2-2a6e-4aa6-865a-b29e18e7c107 tempest-ServersAaction247Test-821762318 tempest-ServersAaction247Test-821762318-project-member] Lock "6ff71cf5-4473-4e25-b6f0-b3da104ed9ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1013.611284] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1014.712683] env[61868]: DEBUG oslo_concurrency.lockutils [None req-83b9e23e-76e1-4b71-acf0-313c6d45d1c0 tempest-ServerRescueTestJSON-931646452 tempest-ServerRescueTestJSON-931646452-project-member] Acquiring lock "d59d2416-ac39-4e06-a20c-b6f392da4af2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1014.712683] env[61868]: DEBUG oslo_concurrency.lockutils [None req-83b9e23e-76e1-4b71-acf0-313c6d45d1c0 tempest-ServerRescueTestJSON-931646452 tempest-ServerRescueTestJSON-931646452-project-member] Lock "d59d2416-ac39-4e06-a20c-b6f392da4af2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1016.110197] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Acquiring lock "efbd7994-e03a-40ab-978d-316667f3e43c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1016.110534] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Lock "efbd7994-e03a-40ab-978d-316667f3e43c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1016.152553] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 
tempest-ListServersNegativeTestJSON-1840204747-project-member] Acquiring lock "bbbde7a6-3107-46b9-b2c9-a4873916f7b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1016.152807] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Lock "bbbde7a6-3107-46b9-b2c9-a4873916f7b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1016.215048] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Acquiring lock "652af8bc-d4b9-4a5a-bba5-7429e659133b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1016.215260] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Lock "652af8bc-d4b9-4a5a-bba5-7429e659133b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1025.554282] env[61868]: WARNING oslo_vmware.rw_handles [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1025.554282] env[61868]: ERROR oslo_vmware.rw_handles [ 1025.555190] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] 
Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1025.556765] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1025.556870] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Copying Virtual Disk [datastore2] vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/4928d747-ec7e-4fdd-83af-22395b4972c1/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1025.557180] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8477281-1468-4c62-9b00-8c6d58f600f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.566468] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Waiting for the task: (returnval){ [ 1025.566468] env[61868]: value = "task-41015" [ 1025.566468] env[61868]: _type = "Task" [ 1025.566468] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.575065] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Task: {'id': task-41015, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.078276] env[61868]: DEBUG oslo_vmware.exceptions [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1026.078516] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1026.079379] env[61868]: ERROR nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.079379] env[61868]: Faults: ['InvalidArgument'] [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Traceback (most recent call last): [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] yield resources [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self.driver.spawn(context, instance, image_meta, [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self._fetch_image_if_missing(context, vi) [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] image_cache(vi, tmp_image_ds_loc) [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] vm_util.copy_virtual_disk( [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] session._wait_for_task(vmdk_copy_task) [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] return self.wait_for_task(task_ref) [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] return evt.wait() [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] result = hub.switch() [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] return self.greenlet.switch() [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self.f(*self.args, **self.kw) [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] raise exceptions.translate_fault(task_info.error) [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Faults: ['InvalidArgument'] [ 1026.079379] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] [ 1026.080514] env[61868]: INFO nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Terminating instance [ 1026.083031] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1026.083296] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.083670] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1026.083901] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.084900] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460b7a41-0dd5-4184-be7e-37818d760e1e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.087787] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3712cc71-d91c-4931-a52e-096b0e913562 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.096061] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1026.097386] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2dad474a-8d79-4db2-b2ce-468bd5417f17 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.098994] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.098994] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1026.099675] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83b60a49-ef6b-496a-8401-1af21c1d5a09 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.105580] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Waiting for the task: (returnval){ [ 1026.105580] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5259c7e3-b6cd-d8c4-6562-9143e02f4e72" [ 1026.105580] env[61868]: _type = "Task" [ 1026.105580] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.122035] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1026.122313] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Creating directory with path [datastore2] vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.122714] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f54be8b1-5837-4d22-bedc-c1292454a767 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.145603] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Created directory with path [datastore2] vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.145848] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Fetch image to [datastore2] vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1026.146044] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1026.147772] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-2787113f-65b3-46b8-845c-37bb5bfafc00 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.156180] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a65716-5536-4348-a2a7-7bf281abb7ed {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.167869] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f618c16-9e26-40cd-bc36-225d6483f892 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.173509] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1026.173780] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1026.174015] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Deleting the datastore file [datastore2] 4cfa680a-0ea3-4c40-b89c-b6067397427a {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.174677] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45f8f8ae-2978-452c-95da-33268626415c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.204137] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf149eb-15b4-41c0-b2ad-9bcbed13b72f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.208258] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Waiting for the task: (returnval){ [ 1026.208258] env[61868]: value = "task-41017" [ 1026.208258] env[61868]: _type = "Task" [ 1026.208258] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.214136] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1403a545-dea3-4039-94c4-f0ce76382606 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.219111] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Task: {'id': task-41017, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.238702] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1026.295740] env[61868]: DEBUG oslo_vmware.rw_handles [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1026.360945] env[61868]: DEBUG oslo_vmware.rw_handles [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1026.361483] env[61868]: DEBUG oslo_vmware.rw_handles [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1026.718821] env[61868]: DEBUG oslo_vmware.api [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Task: {'id': task-41017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069194} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.719114] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.719273] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1026.719438] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1026.719608] env[61868]: INFO nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1026.721748] env[61868]: DEBUG nova.compute.claims [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1026.721936] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1026.722166] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1027.201783] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8362bc2f-7339-4468-b249-e5cdc986655d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.209718] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ab5368-b632-4e4d-9cb1-e78b3f512ce6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.241804] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8976f45b-6347-45b0-9f1a-39877283aa41 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.249871] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd902e0-51b0-4b1a-8717-da51a1901658 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.263586] env[61868]: DEBUG nova.compute.provider_tree [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.271914] env[61868]: DEBUG nova.scheduler.client.report [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1027.289114] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.566s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1027.289114] env[61868]: ERROR nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1027.289114] env[61868]: Faults: ['InvalidArgument'] [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Traceback (most recent call last): [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self.driver.spawn(context, instance, image_meta, [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self._fetch_image_if_missing(context, vi) [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] image_cache(vi, tmp_image_ds_loc) [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] vm_util.copy_virtual_disk( [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] session._wait_for_task(vmdk_copy_task) [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] return self.wait_for_task(task_ref) [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] return evt.wait() [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] result = hub.switch() [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] return self.greenlet.switch() [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] self.f(*self.args, **self.kw) [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] raise exceptions.translate_fault(task_info.error) [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Faults: ['InvalidArgument'] [ 1027.289114] env[61868]: ERROR nova.compute.manager [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] [ 1027.290265] env[61868]: DEBUG nova.compute.utils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] VimFaultException {{(pid=61868) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1027.291857] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Build of instance 4cfa680a-0ea3-4c40-b89c-b6067397427a was re-scheduled: A specified parameter was not correct: fileType [ 1027.291857] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1027.292262] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1027.292438] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1027.292624] env[61868]: DEBUG nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1027.292787] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1027.564933] env[61868]: DEBUG nova.network.neutron [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.579279] env[61868]: INFO nova.compute.manager [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Took 0.29 seconds to deallocate network for instance. 
[ 1027.691477] env[61868]: INFO nova.scheduler.client.report [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Deleted allocations for instance 4cfa680a-0ea3-4c40-b89c-b6067397427a [ 1027.712589] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d4759bab-05e0-4783-b926-37c5813981aa tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 427.398s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1027.714154] env[61868]: DEBUG oslo_concurrency.lockutils [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 228.974s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1027.714416] env[61868]: DEBUG oslo_concurrency.lockutils [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Acquiring lock "4cfa680a-0ea3-4c40-b89c-b6067397427a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1027.714776] env[61868]: DEBUG oslo_concurrency.lockutils [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1027.715173] env[61868]: DEBUG oslo_concurrency.lockutils [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1027.717183] env[61868]: INFO nova.compute.manager [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Terminating instance [ 1027.720027] env[61868]: DEBUG nova.compute.manager [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1027.720284] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1027.721032] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ae9f860-1e3f-4e71-b9d3-8a63c8f29c39 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.733297] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc58e8c5-9651-4727-a4ec-197982428c64 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.745837] env[61868]: DEBUG nova.compute.manager [None req-1dd17edf-740f-4dd6-8e3f-88ca65d418b5 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] [instance: 13ffc1dc-81be-40bc-94cf-b9ac06d98511] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1027.767530] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4cfa680a-0ea3-4c40-b89c-b6067397427a could not be found. [ 1027.767745] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1027.767918] env[61868]: INFO nova.compute.manager [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1027.768174] env[61868]: DEBUG oslo.service.loopingcall [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.768537] env[61868]: DEBUG nova.compute.manager [-] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1027.768670] env[61868]: DEBUG nova.network.neutron [-] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1027.803095] env[61868]: DEBUG nova.network.neutron [-] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.811753] env[61868]: INFO nova.compute.manager [-] [instance: 4cfa680a-0ea3-4c40-b89c-b6067397427a] Took 0.04 seconds to deallocate network for instance. [ 1027.908161] env[61868]: DEBUG oslo_concurrency.lockutils [None req-31a0cc5e-84a9-40ff-8d7e-e842048d93f1 tempest-ServerActionsTestOtherA-130325156 tempest-ServerActionsTestOtherA-130325156-project-member] Lock "4cfa680a-0ea3-4c40-b89c-b6067397427a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.049996] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "26f77431-9a5d-444d-b345-10108c34b59b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1028.802575] env[61868]: DEBUG nova.compute.manager [None req-1dd17edf-740f-4dd6-8e3f-88ca65d418b5 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] [instance: 13ffc1dc-81be-40bc-94cf-b9ac06d98511] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1028.829107] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1dd17edf-740f-4dd6-8e3f-88ca65d418b5 tempest-VolumesAdminNegativeTest-1661419758 tempest-VolumesAdminNegativeTest-1661419758-project-member] Lock "13ffc1dc-81be-40bc-94cf-b9ac06d98511" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.412s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.841730] env[61868]: DEBUG nova.compute.manager [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1028.910818] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1028.911110] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1028.912966] env[61868]: INFO nova.compute.claims [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.354301] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquiring lock "98afcb58-e992-469d-a8fd-94c5eaf69b31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1029.465803] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3fe9ee-f6a2-4b61-ba70-2b0f35ac34e6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.473980] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fec508e-985f-4aaa-9d07-9c62dc42c8c3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.515221] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b638caee-fb3a-464b-bc9a-c9f907575f87 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.523640] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67ec3d4-d000-461b-8d1a-6d2c79f6b21c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.538169] env[61868]: DEBUG nova.compute.provider_tree [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.552038] env[61868]: DEBUG nova.scheduler.client.report [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Inventory has not changed for provider 
6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1029.573186] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.662s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1029.573713] env[61868]: DEBUG nova.compute.manager [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1029.616805] env[61868]: DEBUG nova.compute.claims [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1029.616995] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1029.617226] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1030.122362] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa1037f-74c7-4f12-ae64-b56d5232d90d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.131020] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6c491a-ed3d-4733-8b1a-c95fc8f40f65 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.170424] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857e5631-b407-4648-803e-8ab1f9bb2797 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.179175] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1454e546-3bbf-46b2-9d9d-f0078b7a4823 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.195618] env[61868]: DEBUG nova.compute.provider_tree [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.204634] env[61868]: DEBUG nova.scheduler.client.report [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1030.221966] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.605s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1030.222833] env[61868]: DEBUG nova.compute.utils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Conflict updating instance 98afcb58-e992-469d-a8fd-94c5eaf69b31. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1030.224398] env[61868]: DEBUG nova.compute.manager [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Instance disappeared during build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1030.224658] env[61868]: DEBUG nova.compute.manager [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1030.224946] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquiring lock "refresh_cache-98afcb58-e992-469d-a8fd-94c5eaf69b31" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1030.225151] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquired lock "refresh_cache-98afcb58-e992-469d-a8fd-94c5eaf69b31" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1030.225385] env[61868]: DEBUG nova.network.neutron [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1030.263910] env[61868]: DEBUG nova.network.neutron [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1030.363769] env[61868]: DEBUG nova.network.neutron [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.375668] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Releasing lock "refresh_cache-98afcb58-e992-469d-a8fd-94c5eaf69b31" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1030.375668] env[61868]: DEBUG nova.compute.manager [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1030.375812] env[61868]: DEBUG nova.compute.manager [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1030.375943] env[61868]: DEBUG nova.network.neutron [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1030.397318] env[61868]: DEBUG nova.network.neutron [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1030.404783] env[61868]: DEBUG nova.network.neutron [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.413058] env[61868]: INFO nova.compute.manager [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Took 0.04 seconds to deallocate network for instance. 
[ 1030.487488] env[61868]: INFO nova.scheduler.client.report [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Deleted allocations for instance 98afcb58-e992-469d-a8fd-94c5eaf69b31 [ 1030.487887] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8a10eabc-4629-4db1-838a-641de5840d12 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "98afcb58-e992-469d-a8fd-94c5eaf69b31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.897s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1030.489478] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "98afcb58-e992-469d-a8fd-94c5eaf69b31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.136s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1030.489702] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquiring lock "98afcb58-e992-469d-a8fd-94c5eaf69b31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1030.489938] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "98afcb58-e992-469d-a8fd-94c5eaf69b31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1030.490145] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "98afcb58-e992-469d-a8fd-94c5eaf69b31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1030.493070] env[61868]: INFO nova.compute.manager [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Terminating instance [ 1030.494789] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquiring lock "refresh_cache-98afcb58-e992-469d-a8fd-94c5eaf69b31" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1030.494936] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 
tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Acquired lock "refresh_cache-98afcb58-e992-469d-a8fd-94c5eaf69b31" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1030.495097] env[61868]: DEBUG nova.network.neutron [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1030.502159] env[61868]: DEBUG nova.compute.manager [None req-be2532e6-53db-462e-9d99-f7efa5c5c033 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f630ce29-dcb1-4f1c-9dc4-0cb246e6f1cb] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1030.526379] env[61868]: DEBUG nova.network.neutron [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1030.529626] env[61868]: DEBUG nova.compute.manager [None req-be2532e6-53db-462e-9d99-f7efa5c5c033 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f630ce29-dcb1-4f1c-9dc4-0cb246e6f1cb] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1030.554163] env[61868]: DEBUG oslo_concurrency.lockutils [None req-be2532e6-53db-462e-9d99-f7efa5c5c033 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f630ce29-dcb1-4f1c-9dc4-0cb246e6f1cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.430s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1030.572020] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1030.582808] env[61868]: DEBUG nova.network.neutron [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.593175] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Releasing lock "refresh_cache-98afcb58-e992-469d-a8fd-94c5eaf69b31" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1030.593585] env[61868]: DEBUG nova.compute.manager [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1030.593777] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1030.594286] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aed7773d-3b9d-49b1-b7be-b695eae229a2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.605447] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ae905a-01f5-442e-8285-9e06a18827bd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.354926] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98afcb58-e992-469d-a8fd-94c5eaf69b31 could not be found. [ 1031.355295] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1031.355340] env[61868]: INFO nova.compute.manager [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Took 0.76 seconds to destroy the instance on the hypervisor. 
[ 1031.355541] env[61868]: DEBUG oslo.service.loopingcall [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.356619] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1031.356848] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1031.358299] env[61868]: INFO nova.compute.claims [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1031.361225] env[61868]: DEBUG nova.compute.manager [-] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1031.361336] env[61868]: DEBUG nova.network.neutron [-] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1031.381633] env[61868]: DEBUG nova.network.neutron [-] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1031.391118] env[61868]: DEBUG nova.network.neutron [-] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.408029] env[61868]: INFO nova.compute.manager [-] [instance: 98afcb58-e992-469d-a8fd-94c5eaf69b31] Took 0.04 seconds to deallocate network for instance. 
[ 1031.520167] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81a398d8-d180-45e0-ab03-2b705023cf58 tempest-InstanceActionsNegativeTestJSON-827282731 tempest-InstanceActionsNegativeTestJSON-827282731-project-member] Lock "98afcb58-e992-469d-a8fd-94c5eaf69b31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.031s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1031.867173] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0afc7b-5ca7-406b-b0c2-397c130d4287 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.875460] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bd8b4c-511b-4166-bbed-5fb619a14b4b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.908377] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e25b46c-0b91-4966-ac66-dcbba59e5363 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.916347] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef1ea62-32a3-474a-9564-b102323c9ccc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.931152] env[61868]: DEBUG nova.compute.provider_tree [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.939668] env[61868]: DEBUG nova.scheduler.client.report [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1031.957755] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.601s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1031.958229] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1032.007711] env[61868]: DEBUG nova.compute.utils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1032.009070] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1032.009246] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1032.020161] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1032.095624] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1032.120374] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.120628] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.120813] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.121009] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 
tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.121157] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.121303] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.121509] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.121669] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.121835] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.121994] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.122165] env[61868]: DEBUG nova.virt.hardware [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.123442] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cece03-ee68-4738-be54-cd5f10409040 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.133499] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d851c2-7440-407c-af00-ffa497f75c2d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.228482] env[61868]: DEBUG nova.policy [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42563ff3e832401b9c7a69c9a3feebaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a33cb95d89ad4e1c8aacebb2a9e16009', 'project_domain_id': 'default', 
'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1032.559458] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock "16eb032d-fe34-4a46-883c-8b937806d63f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1032.559458] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "16eb032d-fe34-4a46-883c-8b937806d63f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1032.897433] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Successfully created port: 3a77fa6f-8925-4ad5-8f96-8def9d915ecd {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1034.262346] env[61868]: DEBUG nova.compute.manager [req-c2a75186-873d-465d-9f06-c2b029b46891 req-f361fd2a-0f84-4e4a-8f27-8e93451959af service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Received event network-vif-plugged-3a77fa6f-8925-4ad5-8f96-8def9d915ecd {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1034.262599] env[61868]: DEBUG oslo_concurrency.lockutils [req-c2a75186-873d-465d-9f06-c2b029b46891 req-f361fd2a-0f84-4e4a-8f27-8e93451959af service nova] Acquiring lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1034.262782] env[61868]: DEBUG oslo_concurrency.lockutils [req-c2a75186-873d-465d-9f06-c2b029b46891 req-f361fd2a-0f84-4e4a-8f27-8e93451959af service nova] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1034.262946] env[61868]: DEBUG oslo_concurrency.lockutils [req-c2a75186-873d-465d-9f06-c2b029b46891 req-f361fd2a-0f84-4e4a-8f27-8e93451959af service nova] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1034.263107] env[61868]: DEBUG nova.compute.manager [req-c2a75186-873d-465d-9f06-c2b029b46891 req-f361fd2a-0f84-4e4a-8f27-8e93451959af service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] No waiting events found dispatching network-vif-plugged-3a77fa6f-8925-4ad5-8f96-8def9d915ecd {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1034.263266] env[61868]: WARNING nova.compute.manager [req-c2a75186-873d-465d-9f06-c2b029b46891 req-f361fd2a-0f84-4e4a-8f27-8e93451959af service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Received unexpected event network-vif-plugged-3a77fa6f-8925-4ad5-8f96-8def9d915ecd for instance with vm_state building and task_state spawning. [ 1034.315519] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Successfully updated port: 3a77fa6f-8925-4ad5-8f96-8def9d915ecd {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.332584] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "refresh_cache-bb3714cc-7f34-4a94-b682-aefda6f48ed3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1034.332753] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "refresh_cache-bb3714cc-7f34-4a94-b682-aefda6f48ed3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1034.332918] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1034.430406] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1034.750091] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Updating instance_info_cache with network_info: [{"id": "3a77fa6f-8925-4ad5-8f96-8def9d915ecd", "address": "fa:16:3e:b3:f8:f4", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a77fa6f-89", "ovs_interfaceid": "3a77fa6f-8925-4ad5-8f96-8def9d915ecd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.774740] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "refresh_cache-bb3714cc-7f34-4a94-b682-aefda6f48ed3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1034.775051] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Instance network_info: |[{"id": "3a77fa6f-8925-4ad5-8f96-8def9d915ecd", "address": "fa:16:3e:b3:f8:f4", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a77fa6f-89", "ovs_interfaceid": "3a77fa6f-8925-4ad5-8f96-8def9d915ecd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1034.775470] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 
tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:f8:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba6157eb-73cb-428a-9f46-99081165d7eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a77fa6f-8925-4ad5-8f96-8def9d915ecd', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.783500] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating folder: Project (a33cb95d89ad4e1c8aacebb2a9e16009). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1034.784590] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-592266c0-01ef-4d69-96ee-b837f3d23ca9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.796552] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created folder: Project (a33cb95d89ad4e1c8aacebb2a9e16009) in parent group-v18181. [ 1034.796747] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating folder: Instances. Parent ref: group-v18247. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1034.797023] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2df1ffc8-721e-4f44-b9bc-9b56f323d3ae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.806654] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created folder: Instances in parent group-v18247. [ 1034.806897] env[61868]: DEBUG oslo.service.loopingcall [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.807165] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1034.807322] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a49c42d1-3e8b-4395-8a2e-3f6f59c74c09 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.830427] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.830427] env[61868]: value = "task-41022" [ 1034.830427] env[61868]: _type = "Task" [ 1034.830427] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.839711] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41022, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.342152] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41022, 'name': CreateVM_Task, 'duration_secs': 0.318637} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.342426] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1035.343071] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1035.343572] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1035.347430] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cbd31f-8e5e-42f8-ae33-682d8a16b5f5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.384121] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Reconfiguring VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1035.384582] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ad93cf6-b34f-4b13-b011-e058e4d7e878 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.405949] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1035.405949] env[61868]: value = "task-41023" [ 1035.405949] env[61868]: _type = "Task" [ 1035.405949] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.418991] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41023, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.560620] env[61868]: DEBUG oslo_concurrency.lockutils [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1035.917585] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41023, 'name': ReconfigVM_Task, 'duration_secs': 0.109432} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.917930] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Reconfigured VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1035.918253] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.575s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1035.919122] env[61868]: DEBUG oslo_vmware.service [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487d0598-32e8-40b3-85d1-01efbb6c0e4d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.927554] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1035.928199] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1035.928818] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1035.929353] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe51a0b7-2355-4fa0-a3bc-5c1296dc0a8e {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.936014] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1035.936014] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52461658-b475-0a13-911c-911cdc374ba9" [ 1035.936014] env[61868]: _type = "Task" [ 1035.936014] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.945284] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52461658-b475-0a13-911c-911cdc374ba9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.307986] env[61868]: DEBUG nova.compute.manager [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Received event network-changed-3a77fa6f-8925-4ad5-8f96-8def9d915ecd {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1036.308251] env[61868]: DEBUG nova.compute.manager [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Refreshing instance network info cache due to event network-changed-3a77fa6f-8925-4ad5-8f96-8def9d915ecd. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1036.308476] env[61868]: DEBUG oslo_concurrency.lockutils [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] Acquiring lock "refresh_cache-bb3714cc-7f34-4a94-b682-aefda6f48ed3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1036.308619] env[61868]: DEBUG oslo_concurrency.lockutils [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] Acquired lock "refresh_cache-bb3714cc-7f34-4a94-b682-aefda6f48ed3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1036.308785] env[61868]: DEBUG nova.network.neutron [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Refreshing network info cache for port 3a77fa6f-8925-4ad5-8f96-8def9d915ecd {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1036.453600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1036.453600] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.453600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1036.453600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1036.453600] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.454021] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f364134c-f425-4f0e-8a16-b532c0856004 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.474981] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.475194] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1036.475971] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d96915c-15f8-4b86-80cf-3c7cb35ddb5f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.487642] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83af4437-c8da-4cfa-89b7-1a76286d071f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.502734] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1036.502734] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]526f0043-3b5c-d346-4445-bad143e27f03" [ 1036.502734] env[61868]: _type = "Task" [ 1036.502734] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.511826] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]526f0043-3b5c-d346-4445-bad143e27f03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.762794] env[61868]: DEBUG nova.network.neutron [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Updated VIF entry in instance network info cache for port 3a77fa6f-8925-4ad5-8f96-8def9d915ecd. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1036.763155] env[61868]: DEBUG nova.network.neutron [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Updating instance_info_cache with network_info: [{"id": "3a77fa6f-8925-4ad5-8f96-8def9d915ecd", "address": "fa:16:3e:b3:f8:f4", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a77fa6f-89", "ovs_interfaceid": "3a77fa6f-8925-4ad5-8f96-8def9d915ecd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.772823] env[61868]: DEBUG oslo_concurrency.lockutils [req-2c73a35f-334d-401c-90e3-b39f621d64c0 req-8ee1f316-70f7-4ffe-a3ee-66f74fc7c748 service nova] Releasing lock "refresh_cache-bb3714cc-7f34-4a94-b682-aefda6f48ed3" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1037.018431] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1037.018712] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore1] vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.018971] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-c8051522-efdb-4a4f-9af4-878fe00c8086 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.026412] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2ebced76-abaf-4ace-be06-2fdccdbd8ac5 tempest-ServerShowV257Test-1904292548 tempest-ServerShowV257Test-1904292548-project-member] Acquiring lock "649d7eda-b095-4bb0-962a-acb8dfa50516" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1037.026732] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2ebced76-abaf-4ace-be06-2fdccdbd8ac5 tempest-ServerShowV257Test-1904292548 tempest-ServerShowV257Test-1904292548-project-member] Lock "649d7eda-b095-4bb0-962a-acb8dfa50516" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1037.046723] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore1] vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.046723] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Fetch image to [datastore1] vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1037.046723] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore1] vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore1 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1037.046936] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996502e2-db89-44c5-ab33-3b7fba34fcb6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.054978] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af777a7a-f052-4bc5-bb12-c2ca8bb36f96 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.065537] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5756abe-b6e9-4fec-aba5-75f49b20d214 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.102477] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db64e84-4f3d-4f27-b086-3c8f8e17d7e8 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.108607] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-00fef78f-441f-4fb8-b121-fcd2fd5cf5bb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.133543] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore1 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1037.193430] env[61868]: DEBUG oslo_vmware.rw_handles [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1037.262248] env[61868]: DEBUG oslo_vmware.rw_handles [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1037.262431] env[61868]: DEBUG oslo_vmware.rw_handles [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1037.263219] env[61868]: DEBUG oslo_concurrency.lockutils [None req-be7f336c-3c98-4711-8df2-ffc8de027400 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "5c20c9b3-467c-4c82-9a30-883a4cd62e8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1037.263495] env[61868]: DEBUG oslo_concurrency.lockutils [None req-be7f336c-3c98-4711-8df2-ffc8de027400 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "5c20c9b3-467c-4c82-9a30-883a4cd62e8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1037.984857] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a007db2-064c-414a-8b4f-cafe6f4cdcf2 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Acquiring lock "e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1037.985150] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a007db2-064c-414a-8b4f-cafe6f4cdcf2 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Lock "e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1038.643113] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c802b8dc-f5ba-41fc-9fa0-d8bbdc48893f tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Acquiring lock "e44bdea6-05c1-43c9-b019-d762df3a6451" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1038.643349] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c802b8dc-f5ba-41fc-9fa0-d8bbdc48893f tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Lock "e44bdea6-05c1-43c9-b019-d762df3a6451" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1039.280586] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ff01555b-0604-48be-b780-6caf3bc413a5 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Acquiring lock "17b8aca6-ebe8-4a10-b724-2fa03d991d0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1039.280976] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ff01555b-0604-48be-b780-6caf3bc413a5 
tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Lock "17b8aca6-ebe8-4a10-b724-2fa03d991d0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1052.626289] env[61868]: DEBUG oslo_concurrency.lockutils [None req-455439a7-de1e-409b-927c-f866a30931e8 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "78dde060-c9e0-4f7d-a012-883b8a82b3b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1052.626583] env[61868]: DEBUG oslo_concurrency.lockutils [None req-455439a7-de1e-409b-927c-f866a30931e8 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "78dde060-c9e0-4f7d-a012-883b8a82b3b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1054.503198] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1054.503519] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1057.142057] env[61868]: DEBUG oslo_concurrency.lockutils [None req-149bb090-1f18-441d-bf94-873475c650f3 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Acquiring lock "31f60d0d-900f-4034-b954-00a219e223e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1057.142380] env[61868]: DEBUG oslo_concurrency.lockutils [None req-149bb090-1f18-441d-bf94-873475c650f3 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Lock "31f60d0d-900f-4034-b954-00a219e223e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1062.289209] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c0807466-d316-4988-8385-a1fb7a3d03b8 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] Acquiring lock "ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1062.289592] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c0807466-d316-4988-8385-a1fb7a3d03b8 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] Lock "ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1064.351979] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.351861] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.352241] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1068.352194] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1068.352556] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1068.352556] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1068.375806] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.375975] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376135] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376262] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376383] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376540] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376618] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376736] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376855] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.376971] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1068.377085] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1069.351378] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.351629] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.362667] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1069.362990] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1069.363038] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1069.363189] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1069.364300] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e3e0c0-78a4-4a6a-a8ed-92db37b8e918 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.373196] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fde34e-7028-453f-9c05-959ab9a676f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.387151] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9657321-bf3d-4850-aed0-30ab5c9d8a96 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.394171] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1a4753-cdfb-4688-b655-6aa574ab342f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.424094] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181941MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1069.424259] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1069.424452] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1069.492605] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.492838] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.492960] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.493102] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.493220] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.493555] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.493555] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.493646] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.493682] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.493782] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bb3714cc-7f34-4a94-b682-aefda6f48ed3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.505477] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0e19eccc-446c-48c0-9428-54b71f0b03da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.522789] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.534414] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.546414] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.556975] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.568424] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 6ff71cf5-4473-4e25-b6f0-b3da104ed9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.578846] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d59d2416-ac39-4e06-a20c-b6f392da4af2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.590493] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efbd7994-e03a-40ab-978d-316667f3e43c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.600345] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bbbde7a6-3107-46b9-b2c9-a4873916f7b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.610203] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 652af8bc-d4b9-4a5a-bba5-7429e659133b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.620528] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 16eb032d-fe34-4a46-883c-8b937806d63f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.630103] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 649d7eda-b095-4bb0-962a-acb8dfa50516 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.639987] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 5c20c9b3-467c-4c82-9a30-883a4cd62e8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.649746] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.659279] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e44bdea6-05c1-43c9-b019-d762df3a6451 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.668550] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 17b8aca6-ebe8-4a10-b724-2fa03d991d0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.678165] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4618de15-8f2c-4165-8f23-a4a5542f3d0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.693244] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 31f60d0d-900f-4034-b954-00a219e223e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.703739] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.703976] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1069.704184] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1070.129198] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a792f3a7-c237-4e2a-a759-7687844b865a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.137461] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c20d2c8-11ea-4396-b4d6-58ecbc8c4b7d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.171953] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb9f66b-f35b-4a68-b06f-a689cac185d4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.180903] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5216569e-ab85-4000-bf02-6431bb00cefd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.196771] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.207077] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1070.226799] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1070.227057] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.803s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1071.222560] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1071.222868] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1071.222966] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1071.223113] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1076.168593] env[61868]: WARNING oslo_vmware.rw_handles [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1076.168593] env[61868]: ERROR oslo_vmware.rw_handles [ 1076.169541] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the 
data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1076.171166] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1076.171421] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Copying Virtual Disk [datastore2] vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/c6408c35-0c58-46f1-b204-df15388c5aa4/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1076.171739] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d34b790-07fb-4d31-9c55-3d11da730a55 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.183642] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Waiting for the task: (returnval){ [ 1076.183642] env[61868]: value = "task-41034" [ 1076.183642] env[61868]: _type = "Task" [ 1076.183642] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.192847] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Task: {'id': task-41034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.695104] env[61868]: DEBUG oslo_vmware.exceptions [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1076.695389] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1076.695944] env[61868]: ERROR nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1076.695944] env[61868]: Faults: ['InvalidArgument'] [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Traceback (most recent call last): [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] yield resources [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self.driver.spawn(context, instance, image_meta, [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self._fetch_image_if_missing(context, vi) [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] image_cache(vi, tmp_image_ds_loc) [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] vm_util.copy_virtual_disk( [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] session._wait_for_task(vmdk_copy_task) [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] return self.wait_for_task(task_ref) [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] return evt.wait() [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] result = hub.switch() [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] return self.greenlet.switch() [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self.f(*self.args, **self.kw) [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] raise exceptions.translate_fault(task_info.error) [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Faults: ['InvalidArgument'] [ 1076.695944] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] [ 1076.697346] env[61868]: INFO nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Terminating instance [ 1076.698209] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1076.698419] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.698681] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4bf74b90-f2f6-4c6e-bd46-433495b50cb8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.701201] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1076.701299] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1076.702081] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1931cc08-24fa-4d1d-85de-0a25ab383470 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.710438] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1076.710768] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-224f3424-2657-4215-be31-e4021b56de5d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.713595] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.713762] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1076.714861] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4cbc272-58a6-4c3a-9ca7-58ccad1ad4a4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.720871] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1076.720871] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52519045-81e5-ae82-0899-47b66b9686bc" [ 1076.720871] env[61868]: _type = "Task" [ 1076.720871] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.730795] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52519045-81e5-ae82-0899-47b66b9686bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.793498] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1076.793761] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1076.793884] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Deleting the datastore file [datastore2] ffe7c98f-1367-44fa-b8a2-f34b8de7dfde {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.795542] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-744bec42-385f-4bbe-9920-3f05025ca474 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.800758] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Waiting for the task: (returnval){ [ 1076.800758] env[61868]: value = "task-41036" [ 1076.800758] env[61868]: _type = "Task" [ 1076.800758] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.809484] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Task: {'id': task-41036, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.232520] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1077.233688] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1077.234438] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4311af41-01fd-4946-a6eb-8eb2461cf56a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.249352] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1077.249897] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Fetch image to [datastore2] vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1077.250265] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1077.251311] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1801e4b8-7c87-4e59-89c9-60910c562023 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.259334] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6678a758-d017-48a9-94ed-331ec1ae993f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.269870] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec8b158-c67e-4782-9a1f-c350afd3803c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.307837] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c54eb528-39b1-4678-9534-c830f0dab27d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.320085] env[61868]: DEBUG oslo_vmware.api [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Task: {'id': task-41036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069616} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.320331] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-33d0f606-8e34-4a36-aa66-9fe2850c3298 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.322417] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.322612] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1077.322791] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1077.322972] env[61868]: INFO nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Took 0.62 seconds to destroy the instance on the hypervisor. 
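The Task blocks above follow oslo.vmware's wait_for_task/_poll_task pattern: the caller blocks while a looping call re-reads the vCenter TaskInfo until its state leaves queued/running (the "progress is 0%" entries), then either returns the result or raises a fault translated from task_info.error. A minimal sketch of that polling loop, assuming a pyVmomi-style task object that exposes TaskInfo as task_ref.info (illustrative only, not oslo.vmware's implementation):

import time

def wait_for_task(task_ref, poll_interval=0.5):
    # task_ref.info is assumed to be a vSphere TaskInfo object
    # (state, progress, error, result), as pyVmomi exposes it;
    # oslo.vmware drives the same loop from a loopingcall rather
    # than time.sleep().
    while True:
        info = task_ref.info
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # oslo.vmware raises exceptions.translate_fault(info.error) here
            raise RuntimeError(str(info.error))
        # still 'queued' or 'running' -- the "progress is 0%" case above
        time.sleep(poll_interval)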
[ 1077.325285] env[61868]: DEBUG nova.compute.claims [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1077.325506] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1077.325767] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1077.352083] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1077.414703] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1077.472798] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1077.472989] env[61868]: DEBUG oslo_vmware.rw_handles [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1077.831596] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82967bd4-d863-4136-ac94-7ad0923ed355 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.848034] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91d9770-697d-4b86-98e4-9ce72fb871d9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.879514] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aced4ac-15d4-4ae4-ba72-f78b179d7b9f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.887969] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4926a050-ece8-4eaf-950a-000f65027cbe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.902142] env[61868]: DEBUG nova.compute.provider_tree [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.911296] env[61868]: DEBUG nova.scheduler.client.report [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1077.928321] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.602s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1077.928896] env[61868]: ERROR nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1077.928896] env[61868]: Faults: ['InvalidArgument'] [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Traceback (most recent call last): [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self.driver.spawn(context, instance, image_meta, [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self._fetch_image_if_missing(context, vi) [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] image_cache(vi, tmp_image_ds_loc) [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] vm_util.copy_virtual_disk( [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] session._wait_for_task(vmdk_copy_task) [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] return self.wait_for_task(task_ref) [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] return evt.wait() [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] result = hub.switch() [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] return self.greenlet.switch() [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] self.f(*self.args, **self.kw) [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: 
ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] raise exceptions.translate_fault(task_info.error) [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Faults: ['InvalidArgument'] [ 1077.928896] env[61868]: ERROR nova.compute.manager [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] [ 1077.929905] env[61868]: DEBUG nova.compute.utils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1077.931202] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Build of instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde was re-scheduled: A specified parameter was not correct: fileType [ 1077.931202] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1077.931683] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1077.931882] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1077.932066] env[61868]: DEBUG nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1077.932236] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1078.208242] env[61868]: DEBUG nova.network.neutron [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.223918] env[61868]: INFO nova.compute.manager [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Took 0.29 seconds to deallocate network for instance. [ 1078.344886] env[61868]: INFO nova.scheduler.client.report [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Deleted allocations for instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde [ 1078.369320] env[61868]: DEBUG oslo_concurrency.lockutils [None req-98f9f9ab-af76-495c-afea-16520da4f1c5 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 474.894s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.370830] env[61868]: DEBUG oslo_concurrency.lockutils [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 276.872s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1078.371169] env[61868]: DEBUG oslo_concurrency.lockutils [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Acquiring lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1078.371403] env[61868]: DEBUG oslo_concurrency.lockutils [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock
"ffe7c98f-1367-44fa-b8a2-f34b8de7dfde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1078.371625] env[61868]: DEBUG oslo_concurrency.lockutils [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.374263] env[61868]: INFO nova.compute.manager [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Terminating instance [ 1078.376298] env[61868]: DEBUG nova.compute.manager [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1078.376561] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1078.377104] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8e86e83-71c6-49bf-81ad-a881caf81057 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.389860] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef129661-a924-4e4c-8135-e14122abe3cb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.415479] env[61868]: DEBUG nova.compute.manager [None req-55e1f196-e08b-4869-bb81-05aaddbc2d3c tempest-FloatingIPsAssociationNegativeTestJSON-2012629252 tempest-FloatingIPsAssociationNegativeTestJSON-2012629252-project-member] [instance: 0ef31ff5-f19f-44b0-abb2-b92c76ee9b15] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1078.432303] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ffe7c98f-1367-44fa-b8a2-f34b8de7dfde could not be found.
[ 1078.432723] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1078.433015] env[61868]: INFO nova.compute.manager [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1078.433462] env[61868]: DEBUG oslo.service.loopingcall [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1078.434583] env[61868]: DEBUG nova.compute.manager [-] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1078.434775] env[61868]: DEBUG nova.network.neutron [-] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1078.450225] env[61868]: DEBUG nova.compute.manager [None req-55e1f196-e08b-4869-bb81-05aaddbc2d3c tempest-FloatingIPsAssociationNegativeTestJSON-2012629252 tempest-FloatingIPsAssociationNegativeTestJSON-2012629252-project-member] [instance: 0ef31ff5-f19f-44b0-abb2-b92c76ee9b15] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1078.471263] env[61868]: DEBUG nova.network.neutron [-] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.473948] env[61868]: DEBUG oslo_concurrency.lockutils [None req-55e1f196-e08b-4869-bb81-05aaddbc2d3c tempest-FloatingIPsAssociationNegativeTestJSON-2012629252 tempest-FloatingIPsAssociationNegativeTestJSON-2012629252-project-member] Lock "0ef31ff5-f19f-44b0-abb2-b92c76ee9b15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 234.606s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.479955] env[61868]: INFO nova.compute.manager [-] [instance: ffe7c98f-1367-44fa-b8a2-f34b8de7dfde] Took 0.05 seconds to deallocate network for instance. [ 1078.485075] env[61868]: DEBUG nova.compute.manager [None req-06af30fa-5c66-4634-80e9-e2df74027eb2 tempest-AttachInterfacesUnderV243Test-1999617247 tempest-AttachInterfacesUnderV243Test-1999617247-project-member] [instance: 02387068-d9d8-4a13-b819-e1b9ef57a5d0] Starting instance...
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1078.510969] env[61868]: DEBUG nova.compute.manager [None req-06af30fa-5c66-4634-80e9-e2df74027eb2 tempest-AttachInterfacesUnderV243Test-1999617247 tempest-AttachInterfacesUnderV243Test-1999617247-project-member] [instance: 02387068-d9d8-4a13-b819-e1b9ef57a5d0] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1078.541954] env[61868]: DEBUG oslo_concurrency.lockutils [None req-06af30fa-5c66-4634-80e9-e2df74027eb2 tempest-AttachInterfacesUnderV243Test-1999617247 tempest-AttachInterfacesUnderV243Test-1999617247-project-member] Lock "02387068-d9d8-4a13-b819-e1b9ef57a5d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 233.915s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.559876] env[61868]: DEBUG nova.compute.manager [None req-03d2d008-c03c-4f42-96fd-45e52bd1c08d tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] [instance: 7958ae5c-6db5-4491-8e32-cfd2cab686d3] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1078.595821] env[61868]: DEBUG nova.compute.manager [None req-03d2d008-c03c-4f42-96fd-45e52bd1c08d tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] [instance: 7958ae5c-6db5-4491-8e32-cfd2cab686d3] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1078.599900] env[61868]: DEBUG oslo_concurrency.lockutils [None req-373fd4a0-64d1-45fa-9d2e-bbae1d56bc39 tempest-VolumesAssistedSnapshotsTest-1225995952 tempest-VolumesAssistedSnapshotsTest-1225995952-project-member] Lock "ffe7c98f-1367-44fa-b8a2-f34b8de7dfde" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.229s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.640882] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03d2d008-c03c-4f42-96fd-45e52bd1c08d tempest-SecurityGroupsTestJSON-1335660091 tempest-SecurityGroupsTestJSON-1335660091-project-member] Lock "7958ae5c-6db5-4491-8e32-cfd2cab686d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.250s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.666376] env[61868]: DEBUG nova.compute.manager [None req-a4a72449-fbeb-40e3-a944-4ba957282988 tempest-ImagesOneServerTestJSON-1138467028 tempest-ImagesOneServerTestJSON-1138467028-project-member] [instance: 62c7b41d-1426-408b-a650-4f567171256a] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1078.697662] env[61868]: DEBUG nova.compute.manager [None req-a4a72449-fbeb-40e3-a944-4ba957282988 tempest-ImagesOneServerTestJSON-1138467028 tempest-ImagesOneServerTestJSON-1138467028-project-member] [instance: 62c7b41d-1426-408b-a650-4f567171256a] Instance disappeared before build.
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1078.730481] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a4a72449-fbeb-40e3-a944-4ba957282988 tempest-ImagesOneServerTestJSON-1138467028 tempest-ImagesOneServerTestJSON-1138467028-project-member] Lock "62c7b41d-1426-408b-a650-4f567171256a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.250s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.741852] env[61868]: DEBUG nova.compute.manager [None req-48b104b9-ac6d-42ee-87e0-02e3e28595b8 tempest-TaggedBootDevicesTest_v242-294738673 tempest-TaggedBootDevicesTest_v242-294738673-project-member] [instance: ad906b04-0b02-444a-b837-acde1c22df43] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1078.769070] env[61868]: DEBUG nova.compute.manager [None req-48b104b9-ac6d-42ee-87e0-02e3e28595b8 tempest-TaggedBootDevicesTest_v242-294738673 tempest-TaggedBootDevicesTest_v242-294738673-project-member] [instance: ad906b04-0b02-444a-b837-acde1c22df43] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1078.793194] env[61868]: DEBUG oslo_concurrency.lockutils [None req-48b104b9-ac6d-42ee-87e0-02e3e28595b8 tempest-TaggedBootDevicesTest_v242-294738673 tempest-TaggedBootDevicesTest_v242-294738673-project-member] Lock "ad906b04-0b02-444a-b837-acde1c22df43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 226.254s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.808480] env[61868]: DEBUG nova.compute.manager [None req-86c442aa-c83d-4fea-a456-6b3c2ce5597b tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] [instance: bf46241a-b11c-46e7-b463-c48bc83c8ab2] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1078.836958] env[61868]: DEBUG nova.compute.manager [None req-86c442aa-c83d-4fea-a456-6b3c2ce5597b tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] [instance: bf46241a-b11c-46e7-b463-c48bc83c8ab2] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1078.862419] env[61868]: DEBUG oslo_concurrency.lockutils [None req-86c442aa-c83d-4fea-a456-6b3c2ce5597b tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] Lock "bf46241a-b11c-46e7-b463-c48bc83c8ab2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 225.382s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.873695] env[61868]: DEBUG nova.compute.manager [None req-1ca8d454-42a0-4e1e-ae4c-d5eb4229b8da tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] [instance: a085b185-6663-460d-b2c8-9acee0a89311] Starting instance...
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1078.928331] env[61868]: DEBUG nova.compute.manager [None req-1ca8d454-42a0-4e1e-ae4c-d5eb4229b8da tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] [instance: a085b185-6663-460d-b2c8-9acee0a89311] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1078.964371] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1ca8d454-42a0-4e1e-ae4c-d5eb4229b8da tempest-ListImageFiltersTestJSON-801545188 tempest-ListImageFiltersTestJSON-801545188-project-member] Lock "a085b185-6663-460d-b2c8-9acee0a89311" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.980s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.975942] env[61868]: DEBUG nova.compute.manager [None req-30236137-5264-49e2-96b4-5c2bf3dbf2e7 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 70ad4510-e8e3-4c1a-a58d-d76822248b9a] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1079.002717] env[61868]: DEBUG nova.compute.manager [None req-30236137-5264-49e2-96b4-5c2bf3dbf2e7 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 70ad4510-e8e3-4c1a-a58d-d76822248b9a] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1079.039524] env[61868]: DEBUG oslo_concurrency.lockutils [None req-30236137-5264-49e2-96b4-5c2bf3dbf2e7 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "70ad4510-e8e3-4c1a-a58d-d76822248b9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.653s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.052327] env[61868]: DEBUG nova.compute.manager [None req-02353919-5328-4d42-b719-185c973ab754 tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] [instance: 57439eb9-5e3d-49e7-a634-24cb78d86c99] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1079.082591] env[61868]: DEBUG nova.compute.manager [None req-02353919-5328-4d42-b719-185c973ab754 tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] [instance: 57439eb9-5e3d-49e7-a634-24cb78d86c99] Instance disappeared before build.
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1079.106916] env[61868]: DEBUG oslo_concurrency.lockutils [None req-02353919-5328-4d42-b719-185c973ab754 tempest-AttachVolumeShelveTestJSON-1537042233 tempest-AttachVolumeShelveTestJSON-1537042233-project-member] Lock "57439eb9-5e3d-49e7-a634-24cb78d86c99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 223.693s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.118163] env[61868]: DEBUG nova.compute.manager [None req-037fe429-ed0c-4ba7-a582-82bf5ef41e71 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] [instance: 9a7f9e74-a298-4eee-bf31-153d671ab91a] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1079.152750] env[61868]: DEBUG nova.compute.manager [None req-037fe429-ed0c-4ba7-a582-82bf5ef41e71 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] [instance: 9a7f9e74-a298-4eee-bf31-153d671ab91a] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1079.185419] env[61868]: DEBUG oslo_concurrency.lockutils [None req-037fe429-ed0c-4ba7-a582-82bf5ef41e71 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Lock "9a7f9e74-a298-4eee-bf31-153d671ab91a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 221.875s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.204214] env[61868]: DEBUG nova.compute.manager [None req-a4568fd1-bb5a-4bce-b099-3f8cb1b9e938 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: 0e19eccc-446c-48c0-9428-54b71f0b03da] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1079.259364] env[61868]: DEBUG nova.compute.manager [None req-a4568fd1-bb5a-4bce-b099-3f8cb1b9e938 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] [instance: 0e19eccc-446c-48c0-9428-54b71f0b03da] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1079.286494] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a4568fd1-bb5a-4bce-b099-3f8cb1b9e938 tempest-MigrationsAdminTest-1732276123 tempest-MigrationsAdminTest-1732276123-project-member] Lock "0e19eccc-446c-48c0-9428-54b71f0b03da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 195.756s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.300517] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Starting instance...
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1079.366460] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1079.366460] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1079.367930] env[61868]: INFO nova.compute.claims [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.487498] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6819ef03-8f1b-49b7-833d-d4b198ed63c1 tempest-ServerActionsV293TestJSON-1559847917 tempest-ServerActionsV293TestJSON-1559847917-project-member] Acquiring lock "05e4476d-5e31-4152-8db9-f24db047eb76" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1079.487731] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6819ef03-8f1b-49b7-833d-d4b198ed63c1 tempest-ServerActionsV293TestJSON-1559847917 tempest-ServerActionsV293TestJSON-1559847917-project-member] Lock "05e4476d-5e31-4152-8db9-f24db047eb76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1079.842345] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd86cb44-b26b-44ac-97af-23c0c95b0dc5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.850190] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e282fe68-7c0c-463e-a4e6-e149778a4652 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.888036] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83aa3ce8-7e79-40a3-a8b5-0590ec880c29 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.892312] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5d4e65-f77b-4582-a516-3e9b47a75cb0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.906184] env[61868]: DEBUG nova.compute.provider_tree [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Inventory has not changed in ProviderTree
for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.915437] env[61868]: DEBUG nova.scheduler.client.report [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1079.931298] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.931796] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1079.971052] env[61868]: DEBUG nova.compute.utils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1079.972434] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1079.972550] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1079.985194] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1080.022441] env[61868]: DEBUG nova.policy [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed7d7f61914b4098ade37918b7828521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44ba61fc51a548daa81c69ed19c55b5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1080.065477] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1080.094051] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1080.094559] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1080.094909] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.095256] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1080.095553] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.095846] env[61868]: DEBUG nova.virt.hardware [None 
req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1080.096242] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1080.096557] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1080.096915] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1080.097275] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1080.097690] env[61868]: DEBUG nova.virt.hardware [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1080.098669] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f91b97-c2b2-45d9-a024-9dd6ceae800f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.108260] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e8d23c-cb8b-451f-af39-d4d459de3ffc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.402788] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Successfully created port: 44c5408b-eaa3-47b2-86be-e68084f595f5 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.993701] env[61868]: DEBUG nova.compute.manager [req-b888da67-8290-46c9-b5f8-1ba56440c3a9 req-cf2aed61-7a7b-4fe9-b646-c14253cf1ced service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Received event network-vif-plugged-44c5408b-eaa3-47b2-86be-e68084f595f5 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1080.993920] env[61868]: DEBUG oslo_concurrency.lockutils [req-b888da67-8290-46c9-b5f8-1ba56440c3a9 req-cf2aed61-7a7b-4fe9-b646-c14253cf1ced service nova] Acquiring 
lock "a8e7708c-b9ee-465b-8df8-798983c6f06c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1080.994123] env[61868]: DEBUG oslo_concurrency.lockutils [req-b888da67-8290-46c9-b5f8-1ba56440c3a9 req-cf2aed61-7a7b-4fe9-b646-c14253cf1ced service nova] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1080.994274] env[61868]: DEBUG oslo_concurrency.lockutils [req-b888da67-8290-46c9-b5f8-1ba56440c3a9 req-cf2aed61-7a7b-4fe9-b646-c14253cf1ced service nova] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1080.994447] env[61868]: DEBUG nova.compute.manager [req-b888da67-8290-46c9-b5f8-1ba56440c3a9 req-cf2aed61-7a7b-4fe9-b646-c14253cf1ced service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] No waiting events found dispatching network-vif-plugged-44c5408b-eaa3-47b2-86be-e68084f595f5 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1080.994586] env[61868]: WARNING nova.compute.manager [req-b888da67-8290-46c9-b5f8-1ba56440c3a9 req-cf2aed61-7a7b-4fe9-b646-c14253cf1ced service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Received unexpected event network-vif-plugged-44c5408b-eaa3-47b2-86be-e68084f595f5 for instance with vm_state building and task_state spawning. [ 1080.995506] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Successfully updated port: 44c5408b-eaa3-47b2-86be-e68084f595f5 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.006752] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "refresh_cache-a8e7708c-b9ee-465b-8df8-798983c6f06c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1081.006861] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquired lock "refresh_cache-a8e7708c-b9ee-465b-8df8-798983c6f06c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1081.006999] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1081.070039] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Instance 
cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1081.268032] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Updating instance_info_cache with network_info: [{"id": "44c5408b-eaa3-47b2-86be-e68084f595f5", "address": "fa:16:3e:d2:25:e0", "network": {"id": "bac9b890-51b2-46e4-be2b-29f7795809d8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-683745519-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "44ba61fc51a548daa81c69ed19c55b5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44c5408b-ea", "ovs_interfaceid": "44c5408b-eaa3-47b2-86be-e68084f595f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.283896] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Releasing lock "refresh_cache-a8e7708c-b9ee-465b-8df8-798983c6f06c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1081.284310] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Instance network_info: |[{"id": "44c5408b-eaa3-47b2-86be-e68084f595f5", "address": "fa:16:3e:d2:25:e0", "network": {"id": "bac9b890-51b2-46e4-be2b-29f7795809d8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-683745519-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "44ba61fc51a548daa81c69ed19c55b5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44c5408b-ea", "ovs_interfaceid": "44c5408b-eaa3-47b2-86be-e68084f595f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1081.285028] 
env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:25:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44c5408b-eaa3-47b2-86be-e68084f595f5', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.293963] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Creating folder: Project (44ba61fc51a548daa81c69ed19c55b5e). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1081.294583] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d4ff294-9761-4b19-aa3e-6feeb9922d10 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.308898] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Created folder: Project (44ba61fc51a548daa81c69ed19c55b5e) in parent group-v18181. [ 1081.309112] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Creating folder: Instances. Parent ref: group-v18254. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1081.309358] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26064cda-29ab-4f36-b06d-59631bcd02a1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.320569] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Created folder: Instances in parent group-v18254. [ 1081.320903] env[61868]: DEBUG oslo.service.loopingcall [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.321179] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1081.321456] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-276c2ded-62c8-4d95-9e49-1626cbc8c214 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.342674] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.342674] env[61868]: value = "task-41039" [ 1081.342674] env[61868]: _type = "Task" [ 1081.342674] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.351952] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41039, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.852555] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41039, 'name': CreateVM_Task, 'duration_secs': 0.310172} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.852865] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1081.853499] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1081.853790] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1081.856582] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3907113f-6662-42fc-931b-5b88c60c404c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.891535] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Reconfiguring VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1081.891988] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a6470ad-e93f-4d55-99f8-d4286659951b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.907982] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Waiting for the task: (returnval){ [ 1081.907982] env[61868]: value = "task-41040" [ 1081.907982] env[61868]: _type = "Task" [ 1081.907982] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.917095] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Task: {'id': task-41040, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.418224] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Task: {'id': task-41040, 'name': ReconfigVM_Task, 'duration_secs': 0.114863} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.418509] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Reconfigured VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1082.418723] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1082.418971] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1082.419119] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1082.419447] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1082.419711] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-173e8fb3-d98b-46ce-90bf-a7fc16aab6d6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.424559] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Waiting for the task: (returnval){ [ 1082.424559] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]527dad8c-e189-f919-0d92-f90861527d88" [ 1082.424559] env[61868]: _type = "Task" [ 1082.424559] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.433627] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]527dad8c-e189-f919-0d92-f90861527d88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.934870] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1082.935146] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.935460] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1083.034308] env[61868]: DEBUG nova.compute.manager [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Received event network-changed-44c5408b-eaa3-47b2-86be-e68084f595f5 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1083.034589] env[61868]: DEBUG nova.compute.manager [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Refreshing instance network info cache due to event network-changed-44c5408b-eaa3-47b2-86be-e68084f595f5. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1083.034705] env[61868]: DEBUG oslo_concurrency.lockutils [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] Acquiring lock "refresh_cache-a8e7708c-b9ee-465b-8df8-798983c6f06c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1083.034896] env[61868]: DEBUG oslo_concurrency.lockutils [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] Acquired lock "refresh_cache-a8e7708c-b9ee-465b-8df8-798983c6f06c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1083.035014] env[61868]: DEBUG nova.network.neutron [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Refreshing network info cache for port 44c5408b-eaa3-47b2-86be-e68084f595f5 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1083.640297] env[61868]: DEBUG nova.network.neutron [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Updated VIF entry in instance network info cache for port 44c5408b-eaa3-47b2-86be-e68084f595f5. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1083.640656] env[61868]: DEBUG nova.network.neutron [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Updating instance_info_cache with network_info: [{"id": "44c5408b-eaa3-47b2-86be-e68084f595f5", "address": "fa:16:3e:d2:25:e0", "network": {"id": "bac9b890-51b2-46e4-be2b-29f7795809d8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-683745519-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "44ba61fc51a548daa81c69ed19c55b5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44c5408b-ea", "ovs_interfaceid": "44c5408b-eaa3-47b2-86be-e68084f595f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.649622] env[61868]: DEBUG oslo_concurrency.lockutils [req-75bca3ad-991d-43ed-b559-038e8856536f req-9cc8522a-bf00-4241-8836-85771b1f4da8 service nova] Releasing lock "refresh_cache-a8e7708c-b9ee-465b-8df8-798983c6f06c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1084.486483] env[61868]: WARNING oslo_vmware.rw_handles [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1084.486483] env[61868]: ERROR oslo_vmware.rw_handles [ 1084.487045] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore1 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1084.488721] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1084.489094] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Copying Virtual Disk [datastore1] vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore1] vmware_temp/f119f70e-a542-41d1-b300-ecfffd872bfc/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1084.489496] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8798add7-7e63-4f98-a1ee-19f6589a18f0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.498973] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1084.498973] env[61868]: value = "task-41041" [ 1084.498973] env[61868]: _type = "Task" [ 1084.498973] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.507920] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.013753] env[61868]: DEBUG oslo_vmware.exceptions [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1085.014039] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1085.014569] env[61868]: ERROR nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1085.014569] env[61868]: Faults: ['InvalidArgument'] [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Traceback (most recent call last): [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] yield resources [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self.driver.spawn(context, instance, image_meta, [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self._fetch_image_if_missing(context, vi) [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] image_cache(vi, tmp_image_ds_loc) [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] vm_util.copy_virtual_disk( [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] session._wait_for_task(vmdk_copy_task) [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] return self.wait_for_task(task_ref) [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] return evt.wait() [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] result = hub.switch() [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] return self.greenlet.switch() [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self.f(*self.args, **self.kw) [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] raise exceptions.translate_fault(task_info.error) [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Faults: ['InvalidArgument'] [ 1085.014569] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] [ 1085.015531] env[61868]: INFO nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Terminating instance [ 1085.017803] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1085.018776] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1085.019675] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419a5800-bc47-4e55-86b0-cc862a66f7ed {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.026999] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1085.027352] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c26adf89-6179-4015-bda8-0cdd8690bd4d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.091869] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1085.092107] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Deleting contents of the VM from datastore datastore1 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1085.092288] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleting the datastore file [datastore1] bb3714cc-7f34-4a94-b682-aefda6f48ed3 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1085.092549] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c6fd1b0-3f0b-42ec-b839-55bfbd837df5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.099530] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1085.099530] env[61868]: value = "task-41043" [ 1085.099530] env[61868]: _type = "Task" [ 1085.099530] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.108381] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41043, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.612477] env[61868]: DEBUG oslo_vmware.api [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066178} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.612755] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.612934] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Deleted contents of the VM from datastore datastore1 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1085.613119] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1085.613297] env[61868]: INFO nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Took 0.60 seconds to destroy the instance on the hypervisor. 
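The CreateVM_Task, ReconfigVM_Task, SearchDatastore_Task and DeleteDatastoreFile_Task records above all follow the same oslo.vmware pattern: the caller invokes a SOAP method, receives a Task managed-object reference, and then polls its TaskInfo until vCenter reports success or error. A minimal sketch of that pattern, assuming an already-constructed oslo_vmware.api.VMwareAPISession and pre-built managed-object references; the function and argument names below are illustrative, not Nova's actual helpers:

def create_vm_and_wait(session, folder_ref, config_spec, respool_ref):
    """Issue CreateVM_Task and block until vCenter finishes it."""
    # invoke_api() sends the SOAP request (the "Invoking Folder.CreateVM_Task"
    # record above) and returns a Task managed-object reference.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    # wait_for_task() polls TaskInfo on an interval (the "_poll_task ...
    # progress is 0%." records) and raises
    # oslo_vmware.exceptions.VimFaultException if the task ends in error,
    # e.g. the "A specified parameter was not correct: fileType" fault
    # raised by the CopyVirtualDisk_Task above.
    task_info = session.wait_for_task(task)
    return task_info.result  # managed-object reference of the new VM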
[ 1085.615468] env[61868]: DEBUG nova.compute.claims [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1085.615605] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1085.615847] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1086.095393] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061247a5-8b71-47a3-8112-4a4cd1033c23 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.104372] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83689f2-bce8-41cb-b1a3-47ad4f2373d7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.157356] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59490238-3ec9-455a-ab58-390b028e11ea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.167665] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4867002e-af0d-4d6e-9a5a-e2fed8910c7e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.187568] env[61868]: DEBUG nova.compute.provider_tree [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.198368] env[61868]: DEBUG nova.scheduler.client.report [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1086.225201] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.609s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1086.226278] env[61868]: ERROR nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1086.226278] env[61868]: Faults: ['InvalidArgument'] [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Traceback (most recent call last): [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self.driver.spawn(context, instance, image_meta, [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self._fetch_image_if_missing(context, vi) [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] image_cache(vi, tmp_image_ds_loc) [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] vm_util.copy_virtual_disk( [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] session._wait_for_task(vmdk_copy_task) [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] return self.wait_for_task(task_ref) [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] return evt.wait() [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] result = hub.switch() [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] return self.greenlet.switch() [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] self.f(*self.args, **self.kw) [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] raise exceptions.translate_fault(task_info.error) [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Faults: ['InvalidArgument'] [ 1086.226278] env[61868]: ERROR nova.compute.manager [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] [ 1086.227527] env[61868]: DEBUG nova.compute.utils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1086.229215] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Build of instance bb3714cc-7f34-4a94-b682-aefda6f48ed3 was re-scheduled: A specified parameter was not correct: fileType [ 1086.229215] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1086.229784] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1086.230020] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1086.230234] env[61868]: DEBUG nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1086.230435] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1086.530280] env[61868]: DEBUG nova.network.neutron [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.553463] env[61868]: INFO nova.compute.manager [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Took 0.32 seconds to deallocate network for instance. [ 1086.679795] env[61868]: INFO nova.scheduler.client.report [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted allocations for instance bb3714cc-7f34-4a94-b682-aefda6f48ed3 [ 1086.719759] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b7fc176f-169c-447a-aa19-d85bcc732076 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 248.793s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1086.719759] env[61868]: DEBUG oslo_concurrency.lockutils [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 51.159s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1086.719759] env[61868]: DEBUG oslo_concurrency.lockutils [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1086.719759] env[61868]: DEBUG oslo_concurrency.lockutils [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
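The records above trace Nova's reschedule path: the spawn failure propagates out of _build_and_run_instance, the resource claim is aborted so the held VCPU/MEMORY_MB/DISK_GB inventory is freed, VIF unplugging is skipped because the driver does not implement it, and the network is deallocated before the build is handed back to the scheduler. A minimal control-flow sketch of that sequence; RescheduleRequired, FakeClaim and failing_spawn are illustrative stand-ins, not Nova's API:

    # Sketch of the reschedule path seen above: spawn failure -> abort
    # resource claim -> deallocate network -> mark the build for
    # rescheduling. All names are illustrative stand-ins.
    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("sketch.reschedule")


    class RescheduleRequired(Exception):
        """Raised so an outer layer can retry the build elsewhere."""


    def build_and_run_instance(instance, claim, spawn, deallocate_network):
        try:
            spawn(instance)          # e.g. the driver's spawn() call
        except Exception as exc:     # VimFaultException in the log above
            LOG.debug("Aborting claim for %s", instance)
            claim.abort()            # frees the claimed inventory
            LOG.debug("Deallocating network for %s", instance)
            deallocate_network(instance)
            raise RescheduleRequired(str(exc)) from exc


    class FakeClaim:
        def abort(self):
            LOG.debug("claim aborted")


    def failing_spawn(instance):
        raise RuntimeError("A specified parameter was not correct: fileType")


    try:
        build_and_run_instance("bb3714cc", FakeClaim(), failing_spawn,
                               lambda inst: LOG.debug("network freed"))
    except RescheduleRequired as e:
        LOG.debug("re-scheduled: %s", e)
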
1086.719759] env[61868]: DEBUG oslo_concurrency.lockutils [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1086.722242] env[61868]: INFO nova.compute.manager [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Terminating instance [ 1086.724376] env[61868]: DEBUG nova.compute.manager [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1086.724571] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1086.725061] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c37bb42f-d70d-4bfc-8cce-99e37a160a68 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.736450] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e7d7ef-8224-4cbd-992e-8afa3482beb8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.752041] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1086.771606] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb3714cc-7f34-4a94-b682-aefda6f48ed3 could not be found. [ 1086.771867] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1086.772219] env[61868]: INFO nova.compute.manager [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Took 0.05 seconds to destroy the instance on the hypervisor. 
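The paired "Acquiring lock ... / Lock ... acquired ... / Lock ... 'released'" records above and throughout this log come from oslo.concurrency's lockutils decorator and context manager. A minimal usage sketch, assuming oslo.concurrency is installed; the lock names mirror the log and the guarded bodies are placeholders:

    # Minimal usage sketch of the oslo.concurrency lock pattern behind
    # the acquire/release records above. Lock names follow the log.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Runs while holding the same process-wide lock the resource
        # tracker takes around claim mutations.
        print('aborting claim for', instance_uuid)


    def do_terminate_instance(instance_uuid):
        # Per-instance lock, named after the UUID as in the log.
        with lockutils.lock(instance_uuid):
            abort_instance_claim(instance_uuid)


    do_terminate_instance('bb3714cc-7f34-4a94-b682-aefda6f48ed3')
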
[ 1086.772624] env[61868]: DEBUG oslo.service.loopingcall [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1086.773003] env[61868]: DEBUG nova.compute.manager [-] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1086.773131] env[61868]: DEBUG nova.network.neutron [-] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1086.808290] env[61868]: DEBUG nova.network.neutron [-] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.831557] env[61868]: INFO nova.compute.manager [-] [instance: bb3714cc-7f34-4a94-b682-aefda6f48ed3] Took 0.06 seconds to deallocate network for instance. [ 1086.847346] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1086.847346] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1086.847346] env[61868]: INFO nova.compute.claims [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1086.952889] env[61868]: DEBUG oslo_concurrency.lockutils [None req-829fbfc4-ca24-42cc-84ec-82e326dfcd75 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "bb3714cc-7f34-4a94-b682-aefda6f48ed3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.234s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1087.343520] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd39ab97-b752-4adb-8fa9-d06e8a0b5b09 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.354240] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7092327-1c7e-43f9-b2a3-8d2c28ea77c8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.390895] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce23146a-faaf-40a7-8802-13cb697e4a5e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.399610] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9ae12b-9e4b-455a-9368-eb1ebe4a1aee {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.415513] env[61868]: DEBUG nova.compute.provider_tree [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.427971] env[61868]: DEBUG nova.scheduler.client.report [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1087.446251] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.602s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1087.446891] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1087.489417] env[61868]: DEBUG nova.compute.utils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1087.490782] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Allocating IP information in the background. 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1087.490995] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1087.502493] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1087.588792] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1087.614022] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1087.614258] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1087.614409] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.614584] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1087.614737] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.614882] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1087.615081] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1087.615235] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1087.615394] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1087.615549] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1087.615711] env[61868]: DEBUG nova.virt.hardware [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1087.616591] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d42c1e-3338-468b-a7b3-cd209810b052 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.626307] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413646e4-065d-4f80-ab36-ae71a7838413 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.654216] env[61868]: DEBUG nova.policy [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e411a3410a364d60892a1201038964eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0c4f60be1c14a0f8b75528abf50c2e2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 
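The hardware.py records above enumerate CPU topologies: every (sockets, cores, threads) triple whose product equals the vCPU count and respects the flavor and image limits is a candidate, which for the 1-vCPU m1.nano flavor leaves only 1:1:1. A simplified stand-in for that enumeration, using small demo limits instead of the 65536 defaults; this is not the real nova.virt.hardware code:

    # Sketch of the topology enumeration logged above: keep every
    # (sockets, cores, threads) triple whose product equals the vCPU
    # count and stays within the limits. For 1 vCPU only 1:1:1 fits.
    from itertools import product


    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        found = []
        for sockets, cores, threads in product(range(1, max_sockets + 1),
                                               range(1, max_cores + 1),
                                               range(1, max_threads + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found


    # Mirrors "Build topologies for 1 vcpu(s) 1:1:1" -> [(1, 1, 1)]
    print(possible_cpu_topologies(1, 8, 8, 2))
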
1088.154403] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1088.154777] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1088.284296] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Successfully created port: 151de087-3a41-4b53-b6d8-acc5a00d9c10 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.847222] env[61868]: DEBUG nova.compute.manager [req-91442c9e-27e0-4cd9-b528-ee0e1f386f07 req-0fcf797b-490a-4a24-82ff-075850c5dc9d service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Received event network-vif-plugged-151de087-3a41-4b53-b6d8-acc5a00d9c10 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1088.847222] env[61868]: DEBUG oslo_concurrency.lockutils [req-91442c9e-27e0-4cd9-b528-ee0e1f386f07 req-0fcf797b-490a-4a24-82ff-075850c5dc9d service nova] Acquiring lock "a7371133-1ff3-4016-84fc-a59a9ef6d445-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1088.847222] env[61868]: DEBUG oslo_concurrency.lockutils [req-91442c9e-27e0-4cd9-b528-ee0e1f386f07 req-0fcf797b-490a-4a24-82ff-075850c5dc9d service nova] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1088.847222] env[61868]: DEBUG oslo_concurrency.lockutils [req-91442c9e-27e0-4cd9-b528-ee0e1f386f07 req-0fcf797b-490a-4a24-82ff-075850c5dc9d service nova] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1088.847222] env[61868]: DEBUG nova.compute.manager [req-91442c9e-27e0-4cd9-b528-ee0e1f386f07 req-0fcf797b-490a-4a24-82ff-075850c5dc9d service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] No waiting events found dispatching network-vif-plugged-151de087-3a41-4b53-b6d8-acc5a00d9c10 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1088.847222] env[61868]: WARNING nova.compute.manager [req-91442c9e-27e0-4cd9-b528-ee0e1f386f07 req-0fcf797b-490a-4a24-82ff-075850c5dc9d service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Received unexpected event 
network-vif-plugged-151de087-3a41-4b53-b6d8-acc5a00d9c10 for instance with vm_state building and task_state spawning. [ 1088.849617] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Successfully updated port: 151de087-3a41-4b53-b6d8-acc5a00d9c10 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.862624] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "refresh_cache-a7371133-1ff3-4016-84fc-a59a9ef6d445" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1088.862624] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquired lock "refresh_cache-a7371133-1ff3-4016-84fc-a59a9ef6d445" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1088.862715] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1088.904695] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1089.105031] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Updating instance_info_cache with network_info: [{"id": "151de087-3a41-4b53-b6d8-acc5a00d9c10", "address": "fa:16:3e:fd:06:1d", "network": {"id": "b24df7aa-f569-4cbe-8e2c-951efa24c792", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1413222465-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "b0c4f60be1c14a0f8b75528abf50c2e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap151de087-3a", "ovs_interfaceid": "151de087-3a41-4b53-b6d8-acc5a00d9c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.120776] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Releasing lock "refresh_cache-a7371133-1ff3-4016-84fc-a59a9ef6d445" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1089.121128] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Instance network_info: |[{"id": "151de087-3a41-4b53-b6d8-acc5a00d9c10", "address": "fa:16:3e:fd:06:1d", "network": {"id": "b24df7aa-f569-4cbe-8e2c-951efa24c792", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1413222465-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "b0c4f60be1c14a0f8b75528abf50c2e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap151de087-3a", "ovs_interfaceid": "151de087-3a41-4b53-b6d8-acc5a00d9c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
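A network_info entry like the cache blob above is later flattened into the "Instance VIF info" structure that vmops hands to build_virtual_machine, as the next records show. A sketch of that mapping using the field names visible in the log; the function itself is a hypothetical stand-in, not Nova's implementation:

    # Sketch: flatten one network_info VIF entry into the shape of the
    # "Instance VIF info" record below. Field names follow the log.
    def vif_info_from_network_info(vif, vif_model='e1000'):
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],        # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': vif_model,
        }


    sample = {
        'id': '151de087-3a41-4b53-b6d8-acc5a00d9c10',
        'address': 'fa:16:3e:fd:06:1d',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id':
                    'e59b364d-b7f6-499d-b7dc-82b8a819aa12'},
    }
    print(vif_info_from_network_info(sample))
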
1089.121563] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:06:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e59b364d-b7f6-499d-b7dc-82b8a819aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '151de087-3a41-4b53-b6d8-acc5a00d9c10', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1089.131744] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Creating folder: Project (b0c4f60be1c14a0f8b75528abf50c2e2). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1089.132476] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e257f9a-24ca-4462-ac19-205ae3b7fe5e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.145297] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Created folder: Project (b0c4f60be1c14a0f8b75528abf50c2e2) in parent group-v18181. [ 1089.145297] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Creating folder: Instances. Parent ref: group-v18257. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1089.145542] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-419db321-0f60-43d3-af77-fa3a27592f4d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.158263] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Created folder: Instances in parent group-v18257. [ 1089.158700] env[61868]: DEBUG oslo.service.loopingcall [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1089.159047] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1089.159400] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c82e220-d9f8-4002-865f-fbebc7a3d413 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.182200] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1089.182200] env[61868]: value = "task-41046" [ 1089.182200] env[61868]: _type = "Task" [ 1089.182200] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.191724] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41046, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.693346] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41046, 'name': CreateVM_Task, 'duration_secs': 0.329945} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.693346] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1089.694064] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1089.694442] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1089.697332] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446aa265-3d35-4d99-8aaa-f587fad89f24 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.733655] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Reconfiguring VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1089.734317] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39355221-a431-4039-8625-65454667e233 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.751755] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 
tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Waiting for the task: (returnval){ [ 1089.751755] env[61868]: value = "task-41047" [ 1089.751755] env[61868]: _type = "Task" [ 1089.751755] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.761209] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Task: {'id': task-41047, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.264368] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Task: {'id': task-41047, 'name': ReconfigVM_Task, 'duration_secs': 0.129353} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.265266] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Reconfigured VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1090.265605] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.571s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1090.265989] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1090.266261] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1090.266697] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1090.267271] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-020d2326-56db-40d8-b37e-9a886d8379f6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
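The "Waiting for the task ... progress is 5% ... completed successfully" lines around the CreateVM_Task and ReconfigVM_Task records reflect a poll loop: submit the vCenter task, poll its state until it finishes, and translate an error state into an exception. A self-contained sketch of that loop; the real work is done inside oslo_vmware.api, and the types below are stand-ins:

    # Sketch of the task poll loop behind the progress lines above:
    # poll() returns a dict with 'state', 'progress' and optional
    # 'error'; an error state becomes an exception.
    import time


    class TaskFailed(Exception):
        pass


    def wait_for_task(poll, interval=0.5):
        while True:
            info = poll()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown fault'))
            print('progress is %d%%' % info.get('progress', 0))
            time.sleep(interval)


    states = iter([{'state': 'running', 'progress': 5},
                   {'state': 'success', 'progress': 100}])
    print(wait_for_task(lambda: next(states), interval=0))
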
1090.274981] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Waiting for the task: (returnval){ [ 1090.274981] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]525ad887-5295-e8fd-8126-cd75c192cbb1" [ 1090.274981] env[61868]: _type = "Task" [ 1090.274981] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.284672] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]525ad887-5295-e8fd-8126-cd75c192cbb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.788883] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1090.789297] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1090.789689] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1090.920502] env[61868]: DEBUG nova.compute.manager [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Received event network-changed-151de087-3a41-4b53-b6d8-acc5a00d9c10 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1090.920729] env[61868]: DEBUG nova.compute.manager [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Refreshing instance network info cache due to event network-changed-151de087-3a41-4b53-b6d8-acc5a00d9c10. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1090.920980] env[61868]: DEBUG oslo_concurrency.lockutils [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] Acquiring lock "refresh_cache-a7371133-1ff3-4016-84fc-a59a9ef6d445" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1090.921076] env[61868]: DEBUG oslo_concurrency.lockutils [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] Acquired lock "refresh_cache-a7371133-1ff3-4016-84fc-a59a9ef6d445" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1090.921238] env[61868]: DEBUG nova.network.neutron [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Refreshing network info cache for port 151de087-3a41-4b53-b6d8-acc5a00d9c10 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1091.190195] env[61868]: DEBUG nova.network.neutron [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Updated VIF entry in instance network info cache for port 151de087-3a41-4b53-b6d8-acc5a00d9c10. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1091.190586] env[61868]: DEBUG nova.network.neutron [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Updating instance_info_cache with network_info: [{"id": "151de087-3a41-4b53-b6d8-acc5a00d9c10", "address": "fa:16:3e:fd:06:1d", "network": {"id": "b24df7aa-f569-4cbe-8e2c-951efa24c792", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1413222465-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "b0c4f60be1c14a0f8b75528abf50c2e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap151de087-3a", "ovs_interfaceid": "151de087-3a41-4b53-b6d8-acc5a00d9c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.200037] env[61868]: DEBUG oslo_concurrency.lockutils [req-88a84a88-eb26-4f53-94f8-edf1fd51cd9f req-d71f0e41-7d5b-489f-8102-3b4a2003d803 service nova] Releasing lock "refresh_cache-a7371133-1ff3-4016-84fc-a59a9ef6d445" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1099.649953] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock 
"eafa3522-51e3-4582-b060-3e3ac4224ae2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1099.650234] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1120.351843] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1120.352147] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1120.361555] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] There are 0 instances to clean {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1120.361791] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.367587] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.781108] env[61868]: WARNING oslo_vmware.rw_handles [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1124.781108] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 
1124.781108] env[61868]: ERROR oslo_vmware.rw_handles [ 1124.781616] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1124.783406] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1124.783663] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Copying Virtual Disk [datastore2] vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/46aac19c-4fea-4511-b47f-fadb55681a1c/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1124.783940] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99e60dbd-c80f-4d7d-8e5a-aabb573bf465 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.792289] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1124.792289] env[61868]: value = "task-41048" [ 1124.792289] env[61868]: _type = "Task" [ 1124.792289] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.800115] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.303425] env[61868]: DEBUG oslo_vmware.exceptions [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1125.303710] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1125.304241] env[61868]: ERROR nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1125.304241] env[61868]: Faults: ['InvalidArgument'] [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Traceback (most recent call last): [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] yield resources [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self.driver.spawn(context, instance, image_meta, [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self._fetch_image_if_missing(context, vi) [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] image_cache(vi, tmp_image_ds_loc) [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] vm_util.copy_virtual_disk( [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] session._wait_for_task(vmdk_copy_task) [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] return self.wait_for_task(task_ref) [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] return evt.wait() [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] result = hub.switch() [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] return self.greenlet.switch() [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self.f(*self.args, **self.kw) [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] raise exceptions.translate_fault(task_info.error) [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Faults: ['InvalidArgument'] [ 1125.304241] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] [ 1125.305241] env[61868]: INFO nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Terminating instance [ 1125.306181] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1125.306389] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1125.306629] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2074a23d-35d0-43fa-a461-ebf2fb69615c 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.309042] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1125.309239] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1125.310081] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d43e04-94a6-47b1-8b86-9e2a8f1fb3f1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.317600] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1125.317840] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7afde5a2-90f8-4ccc-9db9-220c6c4842a6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.320377] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1125.320537] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1125.321587] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-149a0804-8e33-4e66-8a61-625126403111 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.326860] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Waiting for the task: (returnval){ [ 1125.326860] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]528ce4d8-f19a-bf2a-9bae-9bc09a38c6a6" [ 1125.326860] env[61868]: _type = "Task" [ 1125.326860] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.336070] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]528ce4d8-f19a-bf2a-9bae-9bc09a38c6a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.346660] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.398011] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1125.398253] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1125.398422] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleting the datastore file [datastore2] 4ed52e2d-018f-4405-9380-0c7f62ef2db3 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.398745] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8114bc2-a84c-4a78-87cd-e2e5ac8e7ed3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.406165] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1125.406165] env[61868]: value = "task-41050" [ 1125.406165] env[61868]: _type = "Task" [ 1125.406165] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.414926] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41050, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.837891] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1125.838166] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Creating directory with path [datastore2] vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1125.838414] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00014340-1704-4787-9003-db86bd20154f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.850446] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Created directory with path [datastore2] vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1125.850652] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Fetch image to [datastore2] vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1125.850851] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1125.851760] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70146d97-2b88-4578-a579-41ee2f9baf94 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.860763] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b37a90-caaa-4272-ab5e-99b8afc1600c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.871312] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107bff0b-83b7-403f-ac63-521802a37634 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.905703] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5931ce79-5445-4515-9e7b-8df1407758b5 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.916679] env[61868]: DEBUG oslo_vmware.api [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075755} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.920054] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.920054] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1125.920054] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1125.920054] env[61868]: INFO nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Took 0.61 seconds to destroy the instance on the hypervisor. 
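The task-41050 records above trace the full lifecycle of a vCenter task as this log reports it: the task is created (DeleteDatastoreFile_Task), polled while "progress is 0%", and finally observed "completed successfully" with a duration_secs value. The earlier CopyVirtualDisk traceback shows the error branch of the same loop, where _poll_task raises a translated fault (VimFaultException). A minimal sketch of that polling contract, using a hypothetical get_task_info callable in place of the real PropertyCollector query and time.sleep in place of the driver's eventlet looping call:

    import time

    class TaskFaultError(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, task_id, interval=0.5):
        # get_task_info is a hypothetical callable standing in for the
        # PropertyCollector query the real driver issues; it returns a
        # dict such as {'state': 'running', 'progress': 40, 'error': None}.
        while True:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                return info  # the "completed successfully" case above
            if info['state'] == 'error':
                # Counterpart of "raise exceptions.translate_fault(
                # task_info.error)" seen in the traceback above.
                raise TaskFaultError(info['error'])
            # Queued or running: report progress and poll again, like
            # the repeated "progress is 0%" DEBUG records.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(interval)

    # Example: a fake task that succeeds on the third poll.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 50},
                   {'state': 'success', 'duration_secs': 0.075}])
    print(wait_for_task(lambda _id: next(states), 'task-41050', interval=0))

The real loop runs inside a green thread via oslo_vmware's loopingcall, which is why the traceback above passes through eventlet's event.wait() and hub.switch() before the fault surfaces in the compute manager.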
[ 1125.920891] env[61868]: DEBUG nova.compute.claims [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1125.921065] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1125.921275] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1125.923831] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-66136724-4415-41eb-b0fe-df985054406a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.950928] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1126.011785] env[61868]: DEBUG oslo_vmware.rw_handles [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1126.066038] env[61868]: DEBUG nova.scheduler.client.report [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Refreshing inventories for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1126.070348] env[61868]: DEBUG oslo_vmware.rw_handles [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Completed reading data from the image iterator. 
{{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1126.070555] env[61868]: DEBUG oslo_vmware.rw_handles [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1126.083158] env[61868]: DEBUG nova.scheduler.client.report [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Updating ProviderTree inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1126.083508] env[61868]: DEBUG nova.compute.provider_tree [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1126.095546] env[61868]: DEBUG nova.scheduler.client.report [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Refreshing aggregate associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, aggregates: None {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1126.114770] env[61868]: DEBUG nova.scheduler.client.report [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Refreshing trait associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1126.569200] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64170cb8-99ef-454d-83b0-0030e9677806 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.576593] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e2d8a5-e83b-40e6-9d61-da558065c361 {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.606616] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50d25f9-4c7a-4658-9536-1980254787ba {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.614808] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f346a70-6e71-447e-a79e-05c586c63bfe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.631143] env[61868]: DEBUG nova.compute.provider_tree [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.641830] env[61868]: DEBUG nova.scheduler.client.report [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1126.658674] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.737s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1126.659228] env[61868]: ERROR nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1126.659228] env[61868]: Faults: ['InvalidArgument'] [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Traceback (most recent call last): [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self.driver.spawn(context, instance, image_meta, [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 
4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self._fetch_image_if_missing(context, vi) [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] image_cache(vi, tmp_image_ds_loc) [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] vm_util.copy_virtual_disk( [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] session._wait_for_task(vmdk_copy_task) [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] return self.wait_for_task(task_ref) [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] return evt.wait() [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] result = hub.switch() [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] return self.greenlet.switch() [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] self.f(*self.args, **self.kw) [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] raise exceptions.translate_fault(task_info.error) [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Faults: 
['InvalidArgument'] [ 1126.659228] env[61868]: ERROR nova.compute.manager [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] [ 1126.660204] env[61868]: DEBUG nova.compute.utils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1126.661916] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Build of instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 was re-scheduled: A specified parameter was not correct: fileType [ 1126.661916] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1126.662280] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1126.662454] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1126.662619] env[61868]: DEBUG nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1126.662781] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1126.943548] env[61868]: DEBUG nova.network.neutron [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.959832] env[61868]: INFO nova.compute.manager [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Took 0.29 seconds to deallocate network for instance. 
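The oslo_concurrency.lockutils records that bracket most operations here follow a fixed three-line pattern: 'Acquiring lock X by Y', 'Lock X acquired by Y :: waited Ns', and 'Lock X "released" by Y :: held Ns', emitted from lockutils.py:404, 409 and 423 respectively; the "compute_resources" lock held 0.737s during the claim abort above is one instance. A toy reconstruction of that decorator pattern, assuming a plain threading.Lock and print() in place of the library's fair/external lock variants and DEBUG logging:

    import threading
    import time

    _locks = {}

    def synchronized(name):
        """Toy version of the lock decorator behind the 'Acquiring lock' /
        'acquired :: waited' / '"released" :: held' DEBUG records; the fair
        and external (inter-process) locks the real library offers are
        omitted here."""
        lock = _locks.setdefault(name, threading.Lock())

        def decorator(fn):
            def inner(*args, **kwargs):
                target = f'"{name}" by "{fn.__qualname__}"'
                print(f'Acquiring lock {target}')
                start = time.monotonic()
                with lock:
                    print(f'Lock {target} acquired :: '
                          f'waited {time.monotonic() - start:.3f}s')
                    held_from = time.monotonic()
                    try:
                        return fn(*args, **kwargs)
                    finally:
                        print(f'Lock {target} "released" :: '
                              f'held {time.monotonic() - held_from:.3f}s')
            return inner
        return decorator

    # Illustrative use, in the spirit of the compute_resources records:
    @synchronized('compute_resources')
    def abort_instance_claim():
        time.sleep(0.1)  # pretend to update the resource tracker

    abort_instance_claim()

The same mechanism also serializes external resources in this log, for example the per-image datastore path lock on devstack-image-cache_base/790b1826-... that one request releases and another immediately acquires around the cache copy at the top of this section.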
[ 1127.062709] env[61868]: INFO nova.scheduler.client.report [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted allocations for instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 [ 1127.090052] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4e159b2a-b486-46f5-adb0-f6fd9c1569a6 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 519.080s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1127.092035] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 321.140s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1127.092035] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1127.092035] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1127.092035] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1127.094560] env[61868]: INFO nova.compute.manager [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Terminating instance [ 1127.096309] env[61868]: DEBUG nova.compute.manager [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1127.096502] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1127.096759] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fe98094-aad5-4566-ae7d-5688c086f31d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.105898] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8778240e-24e8-4acd-a5d4-9a1b1372a8a9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.117211] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1127.139068] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ed52e2d-018f-4405-9380-0c7f62ef2db3 could not be found. [ 1127.139300] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1127.139479] env[61868]: INFO nova.compute.manager [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1127.139724] env[61868]: DEBUG oslo.service.loopingcall [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1127.139969] env[61868]: DEBUG nova.compute.manager [-] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1127.140079] env[61868]: DEBUG nova.network.neutron [-] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1127.171980] env[61868]: DEBUG nova.network.neutron [-] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.182312] env[61868]: INFO nova.compute.manager [-] [instance: 4ed52e2d-018f-4405-9380-0c7f62ef2db3] Took 0.04 seconds to deallocate network for instance. [ 1127.183429] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1127.183808] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1127.185541] env[61868]: INFO nova.compute.claims [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.299027] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ef095898-399f-403f-9cbd-20597dfd0729 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4ed52e2d-018f-4405-9380-0c7f62ef2db3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1127.350464] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.751961] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2a1da9-57e2-44c8-9e7e-f95dfc23aad5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.760649] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578f3a1d-ba87-4852-80ee-7999a23b124b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.790910] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19f7d35-3116-46b2-ab39-6d0282aaca01 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.798935] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796c5db6-5afc-440a-bc72-84ae87fba54b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.812382] env[61868]: DEBUG nova.compute.provider_tree [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.821682] env[61868]: DEBUG nova.scheduler.client.report [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1127.839213] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.655s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1127.839764] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1127.880242] env[61868]: DEBUG nova.compute.utils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1127.881669] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Allocating IP information in the background. 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1127.881772] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1127.893372] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1127.934181] env[61868]: DEBUG nova.policy [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac8a4577c67a4fa58228da89e9ecd071', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e82eb11d22f443dc82bd83a4d58571bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1127.972216] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1128.003128] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1128.003380] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1128.003558] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1128.003717] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1128.003862] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1128.004013] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1128.004261] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1128.004448] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1128.005057] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1128.005138] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1128.005307] env[61868]: DEBUG nova.virt.hardware [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1128.007247] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860493f0-0b20-4f84-b8a9-57b34ba84e7e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.016934] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd1f870-6e7b-4adc-915f-4e598a270654 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.220049] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Successfully created port: 7d704867-786c-4653-bb82-7432f6f5dc3d {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.351574] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.351730] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1128.351773] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1128.372332] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.372488] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.372623] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.372750] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.372874] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.372995] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.373117] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.373237] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.373356] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.373474] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1128.373591] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1128.374171] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1128.757642] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Successfully updated port: 7d704867-786c-4653-bb82-7432f6f5dc3d {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.770238] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "refresh_cache-b2dbce45-4bfa-4356-b608-e44e5a15c081" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1128.770383] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquired lock "refresh_cache-b2dbce45-4bfa-4356-b608-e44e5a15c081" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1128.770533] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1128.837903] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1128.982698] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Updating instance_info_cache with network_info: [{"id": "7d704867-786c-4653-bb82-7432f6f5dc3d", "address": "fa:16:3e:a3:1a:1a", "network": {"id": "ab0988ff-0ccf-4c6c-96cb-138304c986e8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1349158406-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {}}], "meta": {"injected": false, "tenant_id": "e82eb11d22f443dc82bd83a4d58571bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d704867-78", "ovs_interfaceid": "7d704867-786c-4653-bb82-7432f6f5dc3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.992067] env[61868]: DEBUG nova.compute.manager [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Received event network-vif-plugged-7d704867-786c-4653-bb82-7432f6f5dc3d {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1128.992338] env[61868]: DEBUG oslo_concurrency.lockutils [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] Acquiring lock "b2dbce45-4bfa-4356-b608-e44e5a15c081-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1128.992593] env[61868]: DEBUG oslo_concurrency.lockutils [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1128.992783] env[61868]: DEBUG oslo_concurrency.lockutils [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1128.993012] env[61868]: DEBUG nova.compute.manager [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] No waiting events found dispatching network-vif-plugged-7d704867-786c-4653-bb82-7432f6f5dc3d {{(pid=61868) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1128.993210] env[61868]: WARNING nova.compute.manager [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Received unexpected event network-vif-plugged-7d704867-786c-4653-bb82-7432f6f5dc3d for instance with vm_state building and task_state spawning. [ 1128.993409] env[61868]: DEBUG nova.compute.manager [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Received event network-changed-7d704867-786c-4653-bb82-7432f6f5dc3d {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1128.993606] env[61868]: DEBUG nova.compute.manager [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Refreshing instance network info cache due to event network-changed-7d704867-786c-4653-bb82-7432f6f5dc3d. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1128.993826] env[61868]: DEBUG oslo_concurrency.lockutils [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] Acquiring lock "refresh_cache-b2dbce45-4bfa-4356-b608-e44e5a15c081" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1128.997565] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Releasing lock "refresh_cache-b2dbce45-4bfa-4356-b608-e44e5a15c081" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1128.997846] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Instance network_info: |[{"id": "7d704867-786c-4653-bb82-7432f6f5dc3d", "address": "fa:16:3e:a3:1a:1a", "network": {"id": "ab0988ff-0ccf-4c6c-96cb-138304c986e8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1349158406-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {}}], "meta": {"injected": false, "tenant_id": "e82eb11d22f443dc82bd83a4d58571bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d704867-78", "ovs_interfaceid": "7d704867-786c-4653-bb82-7432f6f5dc3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1128.998114] env[61868]: DEBUG oslo_concurrency.lockutils [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] Acquired lock 
"refresh_cache-b2dbce45-4bfa-4356-b608-e44e5a15c081" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1128.998409] env[61868]: DEBUG nova.network.neutron [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Refreshing network info cache for port 7d704867-786c-4653-bb82-7432f6f5dc3d {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1129.000086] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:1a:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c13fd8bc-e797-42fe-94ed-6370d3467a7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d704867-786c-4653-bb82-7432f6f5dc3d', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1129.007993] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Creating folder: Project (e82eb11d22f443dc82bd83a4d58571bb). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1129.009395] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51a48e37-962f-4a17-84b0-621a4c1fd2f0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.024975] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Created folder: Project (e82eb11d22f443dc82bd83a4d58571bb) in parent group-v18181. [ 1129.025185] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Creating folder: Instances. Parent ref: group-v18260. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1129.025435] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aeb1d9d0-5940-462c-bb2e-9dd77c3cbee0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.035705] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Created folder: Instances in parent group-v18260. [ 1129.035981] env[61868]: DEBUG oslo.service.loopingcall [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1129.036195] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1129.036404] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82b461d5-c51f-4ae2-9d32-6e4a9a5415be {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.055923] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1129.055923] env[61868]: value = "task-41053" [ 1129.055923] env[61868]: _type = "Task" [ 1129.055923] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.063974] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41053, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.351244] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.364036] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1129.364264] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1129.364531] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1129.364763] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1129.366077] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35383ae-266d-4ebc-9b83-0baf9c180a6f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.375903] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4bf6a7-4d5d-4df4-9882-24be7194ca50 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.394195] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee40f070-c292-4ac8-b26d-25202729d76c {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.403184] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06184db1-23d3-4096-87e3-f8581d1df01d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.435271] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181905MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1129.435565] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1129.435958] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1129.483701] env[61868]: DEBUG nova.network.neutron [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Updated VIF entry in instance network info cache for port 7d704867-786c-4653-bb82-7432f6f5dc3d. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1129.484094] env[61868]: DEBUG nova.network.neutron [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Updating instance_info_cache with network_info: [{"id": "7d704867-786c-4653-bb82-7432f6f5dc3d", "address": "fa:16:3e:a3:1a:1a", "network": {"id": "ab0988ff-0ccf-4c6c-96cb-138304c986e8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1349158406-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {}}], "meta": {"injected": false, "tenant_id": "e82eb11d22f443dc82bd83a4d58571bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d704867-78", "ovs_interfaceid": "7d704867-786c-4653-bb82-7432f6f5dc3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.500326] env[61868]: DEBUG oslo_concurrency.lockutils [req-7cd16f24-eeb3-4019-8cd6-9ca0ab8a2927 req-43146fe9-8927-4a57-8e67-a36721dfbad0 service nova] Releasing lock "refresh_cache-b2dbce45-4bfa-4356-b608-e44e5a15c081" {{(pid=61868) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1129.518785] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 2466fe4e-2589-4417-a63a-4d8bc695109d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.519059] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.519319] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.519541] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.519712] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.519854] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.520072] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.520218] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.520338] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.520451] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.533923] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.546810] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 6ff71cf5-4473-4e25-b6f0-b3da104ed9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.557228] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d59d2416-ac39-4e06-a20c-b6f392da4af2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.568969] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41053, 'name': CreateVM_Task, 'duration_secs': 0.318442} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.569634] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efbd7994-e03a-40ab-978d-316667f3e43c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.570679] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1129.571689] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1129.571960] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1129.575463] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474d6fe3-63c6-48b5-a0ec-e804851b8e2c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.587045] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bbbde7a6-3107-46b9-b2c9-a4873916f7b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.612599] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1129.613663] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 652af8bc-d4b9-4a5a-bba5-7429e659133b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.615565] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dcf2e58-8c25-4a12-80b6-3439b1899dee {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.629944] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 16eb032d-fe34-4a46-883c-8b937806d63f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.636528] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Waiting for the task: (returnval){ [ 1129.636528] env[61868]: value = "task-41054" [ 1129.636528] env[61868]: _type = "Task" [ 1129.636528] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.649967] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Task: {'id': task-41054, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.651449] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 649d7eda-b095-4bb0-962a-acb8dfa50516 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.666196] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 5c20c9b3-467c-4c82-9a30-883a4cd62e8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.677976] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.689753] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e44bdea6-05c1-43c9-b019-d762df3a6451 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.702883] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 17b8aca6-ebe8-4a10-b724-2fa03d991d0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.715235] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4618de15-8f2c-4165-8f23-a4a5542f3d0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.726742] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 31f60d0d-900f-4034-b954-00a219e223e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.738829] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.750525] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 05e4476d-5e31-4152-8db9-f24db047eb76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.765872] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.776801] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
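The task waits interleaved above, task-41053 (CreateVM_Task) and task-41054 (ReconfigVM_Task), follow oslo.vmware's poll-until-terminal pattern: wait_for_task logs the returnval, then _poll_task reports progress ("progress is 0%", "progress is 6%") until the task reaches success or error. A minimal sketch of that loop, with a hypothetical poll_task_info() standing in for the real PropertyCollector-based TaskInfo read:

    import time

    def wait_for_task(task_ref, poll_task_info, interval=0.5):
        # poll_task_info(task_ref) is assumed to return a dict like
        # {'state': 'running', 'progress': 6, ...}.
        while True:
            info = poll_task_info(task_ref)
            if info['state'] == 'success':
                return info.get('result')          # e.g. the new VM reference
            if info['state'] == 'error':
                raise RuntimeError(info['error'])  # translated to a fault upstream
            time.sleep(interval)                   # 'queued'/'running': poll again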
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.777060] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1129.777208] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1130.129501] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98ec99f-dedb-4cac-8f46-e47aec6d0a2e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.137512] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd299397-aebd-45fe-8e37-ef6dc0ac12a4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.149467] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Task: {'id': task-41054, 'name': ReconfigVM_Task, 'duration_secs': 0.122615} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.173844] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1130.174107] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.602s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1130.174364] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1130.174512] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1130.174852] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 
tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1130.175339] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-633f5dff-9692-4ff3-a52a-d26af638c88a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.177696] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dcf5d0-ac66-46d2-8b8a-ff68ab880596 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.187078] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Waiting for the task: (returnval){ [ 1130.187078] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]526dea38-10d9-2d8f-f113-64472fb091d3" [ 1130.187078] env[61868]: _type = "Task" [ 1130.187078] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.189092] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8ff355-b438-4973-813b-f75531d50315 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.207066] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.211618] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1130.211873] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1130.212193] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1130.215487] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1130.231895] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1130.232145] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.796s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1131.227840] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.228181] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.351777] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.352107] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61868) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1132.362259] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.362561] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.362645] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
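The "Final resource view" above is internally consistent with the per-instance allocations reported earlier: ten actively managed instances at 1 VCPU / 128 MB / 1 GB disk each, plus the 512 MB reserved in the inventory record just logged. A quick arithmetic check, purely illustrative:

    instances = 10
    assert instances * 1 == 10            # used_vcpus=10
    assert instances * 128 + 512 == 1792  # used_ram=1792MB (512 MB reserved)
    assert instances * 1 == 10            # used_disk=10GB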
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1133.995917] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "c9f74904-0558-42e6-a454-c7103b2873b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1133.996247] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "c9f74904-0558-42e6-a454-c7103b2873b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1134.962600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.613186] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.635804] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Getting list of instances from cluster (obj){ [ 1149.635804] env[61868]: value = "domain-c8" [ 1149.635804] env[61868]: _type = "ClusterComputeResource" [ 1149.635804] env[61868]: } {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1149.637654] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2acaac4-bcd6-4f52-bc3d-48fad2487547 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.656846] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Got total of 10 instances {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1149.657040] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 2466fe4e-2589-4417-a63a-4d8bc695109d {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.657234] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 1df194bf-fa9b-4d03-9b20-8478147de566 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.657388] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 4cec72dc-99c1-4cf9-b391-a909bab7fb23 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.657539] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 
972ab1c7-03b0-4294-930c-8084674083ba {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.657686] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid d6ac9ed4-56dd-493a-8d9f-0cfad210b6de {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.657889] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 3394162c-605f-40a1-9dc8-dc5cba6a083f {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.658059] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 26f77431-9a5d-444d-b345-10108c34b59b {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.658205] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid a8e7708c-b9ee-465b-8df8-798983c6f06c {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.658348] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid a7371133-1ff3-4016-84fc-a59a9ef6d445 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.658491] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid b2dbce45-4bfa-4356-b608-e44e5a15c081 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1149.658869] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "2466fe4e-2589-4417-a63a-4d8bc695109d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.659102] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "1df194bf-fa9b-4d03-9b20-8478147de566" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.659482] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.659727] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "972ab1c7-03b0-4294-930c-8084674083ba" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.659925] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1149.660152] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.660347] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "26f77431-9a5d-444d-b345-10108c34b59b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.660532] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.660935] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1149.661070] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1174.897635] env[61868]: WARNING oslo_vmware.rw_handles [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1174.897635] env[61868]: ERROR oslo_vmware.rw_handles [ 1174.898378] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 
tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1174.900454] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1174.900764] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Copying Virtual Disk [datastore2] vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/1a8c0536-6fbe-45b4-a94f-acd4bf805b46/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1174.901101] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d2ee7ea-acc4-45f0-a812-5206f3db5db0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.909271] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Waiting for the task: (returnval){ [ 1174.909271] env[61868]: value = "task-41055" [ 1174.909271] env[61868]: _type = "Task" [ 1174.909271] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.918579] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Task: {'id': task-41055, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.420130] env[61868]: DEBUG oslo_vmware.exceptions [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Fault InvalidArgument not matched. 
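Despite the RemoteDisconnected warning from rw_handles, the image download above completed: the data landed in the temporary sparse VMDK, and caching then proceeds by copying that file to the flat cache location via CopyVirtualDisk_Task (task-41055). A minimal sketch of how the two datastore paths in the log relate; the path-joining helper is a stand-in for the real ds_util code:

    def cache_paths(datastore, tmp_dir, image_id):
        # Mirrors the two paths in the log: the temporary sparse download
        # and the cached VMDK that CopyVirtualDisk_Task writes.
        base = 'vmware_temp/%s/%s' % (tmp_dir, image_id)
        src = '[%s] %s/tmp-sparse.vmdk' % (datastore, base)
        dst = '[%s] %s/%s.vmdk' % (datastore, base, image_id)
        return src, dst

    src, dst = cache_paths('datastore2',
                           '1a8c0536-6fbe-45b4-a94f-acd4bf805b46',
                           '790b1826-10c3-4b26-ad5d-ce8b36354025')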
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1175.420406] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1175.420983] env[61868]: ERROR nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1175.420983] env[61868]: Faults: ['InvalidArgument'] [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Traceback (most recent call last): [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] yield resources [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self.driver.spawn(context, instance, image_meta, [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self._fetch_image_if_missing(context, vi) [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] image_cache(vi, tmp_image_ds_loc) [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] vm_util.copy_virtual_disk( [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] session._wait_for_task(vmdk_copy_task) [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] return self.wait_for_task(task_ref) [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] return evt.wait() [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] result = hub.switch() [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] return self.greenlet.switch() [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self.f(*self.args, **self.kw) [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] raise exceptions.translate_fault(task_info.error) [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Faults: ['InvalidArgument'] [ 1175.420983] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] [ 1175.422031] env[61868]: INFO nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Terminating instance [ 1175.422891] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1175.423092] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1175.424033] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29eda6c6-f1f4-48e2-b1e0-d410445cb5cd {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.426149] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1175.426338] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1175.427102] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935c3f4f-93e9-465d-a377-d3a8c89d9a73 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.434731] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1175.434950] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bf9d221-0d50-440d-8649-d7481936827d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.437553] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1175.437773] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1175.438744] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8f3cd9b-f302-4cb1-994e-2a24d37b98ac {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.443809] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Waiting for the task: (returnval){ [ 1175.443809] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a5344e-9f14-0f83-0108-aab310cd9cb7" [ 1175.443809] env[61868]: _type = "Task" [ 1175.443809] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.452352] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a5344e-9f14-0f83-0108-aab310cd9cb7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.505950] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1175.506235] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1175.506297] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Deleting the datastore file [datastore2] 2466fe4e-2589-4417-a63a-4d8bc695109d {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1175.506531] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbc54064-8f8f-4eec-ae2a-99e64cdc4d5e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.514527] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Waiting for the task: (returnval){ [ 1175.514527] env[61868]: value = "task-41057" [ 1175.514527] env[61868]: _type = "Task" [ 1175.514527] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.524816] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Task: {'id': task-41057, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.954619] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1175.954965] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Creating directory with path [datastore2] vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1175.955293] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1debbeb-5edd-416a-aaa9-f6141c242344 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.968955] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Created directory with path [datastore2] vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1175.969194] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Fetch image to [datastore2] vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1175.969371] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1175.970191] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deedef74-9f42-455a-89c8-d7edbe366a93 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.978122] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d99ab0-da90-47ca-9d69-c580fe99d164 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.988140] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbc857c-a0bf-4106-9c81-cc95514f0b67 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.024577] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0059fd3d-2bc1-4db7-a3e1-ebc42956dc29 {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.032432] env[61868]: DEBUG oslo_vmware.api [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Task: {'id': task-41057, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080674} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.034056] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.034241] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1176.034409] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1176.034582] env[61868]: INFO nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1176.036434] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c47eb69b-9250-4cd5-a3e7-ca61a654f17e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.038458] env[61868]: DEBUG nova.compute.claims [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1176.038625] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1176.038877] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1176.064300] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1176.126238] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1176.183781] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1176.184054] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1176.550974] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015fccbe-8303-46f9-b276-53c047109f87 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.560076] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc91d3c-96ba-4dd3-9f2c-2af29e9e774a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.601872] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5be22b8-ea47-4e18-9f6b-551db9fd83cf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.610862] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775b15d9-7e07-4e4d-ab2a-8f7223c19c99 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.625783] env[61868]: DEBUG nova.compute.provider_tree [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.634242] env[61868]: DEBUG nova.scheduler.client.report [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1176.652745] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.614s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1176.653318] env[61868]: ERROR nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1176.653318] env[61868]: Faults: ['InvalidArgument'] [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Traceback (most recent call last): [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1176.653318] env[61868]: ERROR 
nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self.driver.spawn(context, instance, image_meta, [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self._fetch_image_if_missing(context, vi) [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] image_cache(vi, tmp_image_ds_loc) [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] vm_util.copy_virtual_disk( [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] session._wait_for_task(vmdk_copy_task) [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] return self.wait_for_task(task_ref) [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] return evt.wait() [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] result = hub.switch() [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] return self.greenlet.switch() [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] self.f(*self.args, **self.kw) [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] raise exceptions.translate_fault(task_info.error) [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Faults: ['InvalidArgument'] [ 1176.653318] env[61868]: ERROR nova.compute.manager [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] [ 1176.654232] env[61868]: DEBUG nova.compute.utils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1176.655524] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Build of instance 2466fe4e-2589-4417-a63a-4d8bc695109d was re-scheduled: A specified parameter was not correct: fileType [ 1176.655524] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1176.655902] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1176.656090] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1176.656269] env[61868]: DEBUG nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1176.656436] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1176.932270] env[61868]: DEBUG nova.network.neutron [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.945286] env[61868]: INFO nova.compute.manager [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Took 0.29 seconds to deallocate network for instance. [ 1177.051274] env[61868]: INFO nova.scheduler.client.report [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Deleted allocations for instance 2466fe4e-2589-4417-a63a-4d8bc695109d [ 1177.080625] env[61868]: DEBUG oslo_concurrency.lockutils [None req-73a72652-ded3-4f8e-bf78-f0b9d3b43d84 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 567.499s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.082026] env[61868]: DEBUG oslo_concurrency.lockutils [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 369.042s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.082162] env[61868]: DEBUG oslo_concurrency.lockutils [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Acquiring lock "2466fe4e-2589-4417-a63a-4d8bc695109d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1177.082311] env[61868]: DEBUG oslo_concurrency.lockutils [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s
{{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.082459] env[61868]: DEBUG oslo_concurrency.lockutils [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.084683] env[61868]: INFO nova.compute.manager [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Terminating instance [ 1177.086643] env[61868]: DEBUG nova.compute.manager [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1177.086891] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1177.088134] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09205450-10d1-4ed9-9a44-3a577dd5779c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.098072] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9041b973-a5c3-42db-a813-b0f1c3e45337 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.109841] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1177.139297] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2466fe4e-2589-4417-a63a-4d8bc695109d could not be found. [ 1177.139579] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1177.139799] env[61868]: INFO nova.compute.manager [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1177.140073] env[61868]: DEBUG oslo.service.loopingcall [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.140379] env[61868]: DEBUG nova.compute.manager [-] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1177.140479] env[61868]: DEBUG nova.network.neutron [-] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1177.169040] env[61868]: DEBUG nova.network.neutron [-] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.176060] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1177.176329] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.178221] env[61868]: INFO nova.compute.claims [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1177.181809] env[61868]: INFO nova.compute.manager [-] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] Took 0.04 seconds to deallocate network for instance. [ 1177.294820] env[61868]: DEBUG oslo_concurrency.lockutils [None req-756ea6cd-2549-494f-94ac-4490abbc0297 tempest-ImagesNegativeTestJSON-326110035 tempest-ImagesNegativeTestJSON-326110035-project-member] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.213s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.295660] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 27.637s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.295853] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 2466fe4e-2589-4417-a63a-4d8bc695109d] During sync_power_state the instance has a pending task (deleting).
Skip. [ 1177.296040] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "2466fe4e-2589-4417-a63a-4d8bc695109d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.642158] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0eb9cc-5653-4124-9fbc-7fa187e6e23a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.651560] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5ce8ca-12b7-4174-ad3e-e22c375691fd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.682240] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371efa1f-92c5-48a6-b4f3-c3bd400326b1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.690247] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781deb4e-f62a-4fc0-8156-dc17bb238034 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.703344] env[61868]: DEBUG nova.compute.provider_tree [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.712107] env[61868]: DEBUG nova.scheduler.client.report [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1177.730274] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.554s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.731492] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Start building networks asynchronously for instance.
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1177.767647] env[61868]: DEBUG nova.compute.utils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1177.769010] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1177.769182] env[61868]: DEBUG nova.network.neutron [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1177.781799] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1177.820502] env[61868]: DEBUG nova.policy [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91b3a84ec5c48d896a5bf3d8c568343', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9eabc0f9c1604e90b373219843edfc8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1177.859214] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1177.885479] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1177.885732] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1177.885893] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1177.886219] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1177.886219] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1177.886372] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1177.886581] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1177.886791] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1177.886987] env[61868]: DEBUG 
nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1177.887155] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1177.887327] env[61868]: DEBUG nova.virt.hardware [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1177.888522] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c71608-fb72-4ce4-8410-5319b86e3aef {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.896667] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919cf8be-bf52-42a9-8fd6-dfe54eb1a340 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.110630] env[61868]: DEBUG nova.network.neutron [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Successfully created port: a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1178.695112] env[61868]: DEBUG nova.network.neutron [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Successfully updated port: a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.706016] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "refresh_cache-8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1178.706163] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "refresh_cache-8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1178.706318] env[61868]: DEBUG nova.network.neutron [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1178.753634] env[61868]: DEBUG nova.network.neutron [None 
req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1178.927756] env[61868]: DEBUG nova.network.neutron [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Updating instance_info_cache with network_info: [{"id": "a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e", "address": "fa:16:3e:47:4c:55", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7f28a4a-d2", "ovs_interfaceid": "a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.943431] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "refresh_cache-8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1178.943815] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Instance network_info: |[{"id": "a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e", "address": "fa:16:3e:47:4c:55", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7f28a4a-d2", "ovs_interfaceid": "a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1178.944646] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:4c:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1178.952133] env[61868]: DEBUG oslo.service.loopingcall [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1178.952684] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1178.952924] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7bfd670c-be20-4315-b0bd-9e7e720afd81 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.975778] env[61868]: DEBUG nova.compute.manager [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Received event network-vif-plugged-a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1178.976028] env[61868]: DEBUG oslo_concurrency.lockutils [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] Acquiring lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1178.976313] env[61868]: DEBUG oslo_concurrency.lockutils [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1178.976489] env[61868]: DEBUG oslo_concurrency.lockutils [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1178.976656] env[61868]: DEBUG nova.compute.manager [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] No waiting events found dispatching 
network-vif-plugged-a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1178.976821] env[61868]: WARNING nova.compute.manager [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Received unexpected event network-vif-plugged-a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e for instance with vm_state building and task_state spawning. [ 1178.976981] env[61868]: DEBUG nova.compute.manager [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Received event network-changed-a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1178.977132] env[61868]: DEBUG nova.compute.manager [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Refreshing instance network info cache due to event network-changed-a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1178.977308] env[61868]: DEBUG oslo_concurrency.lockutils [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] Acquiring lock "refresh_cache-8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1178.977440] env[61868]: DEBUG oslo_concurrency.lockutils [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] Acquired lock "refresh_cache-8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1178.977625] env[61868]: DEBUG nova.network.neutron [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Refreshing network info cache for port a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1178.980526] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1178.980526] env[61868]: value = "task-41058" [ 1178.980526] env[61868]: _type = "Task" [ 1178.980526] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.993570] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41058, 'name': CreateVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.258227] env[61868]: DEBUG nova.network.neutron [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Updated VIF entry in instance network info cache for port a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e. 
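The task entries above (the multi-line "value = task-41058 / _type = Task" block followed by "progress is 5%.") reflect the submit-then-poll pattern oslo.vmware uses for vCenter tasks: submit, then repeatedly read state and progress until the task reaches a terminal state. A minimal sketch of that pattern, assuming a hypothetical get_task_info accessor rather than oslo.vmware's real API:

import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    # Poll a vSphere-style task until it reaches a terminal state,
    # mirroring the "progress is N%." entries in this log.
    while True:
        info = get_task_info(task_ref)  # hypothetical accessor returning a dict
        state = info.get('state')
        if state == 'success':
            return info.get('result')
        if state == 'error':
            # oslo.vmware translates the fault and raises here (see the
            # VimFaultException traceback later in this log).
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"Task {task_ref}: {state}, progress {info.get('progress', 0)}%")
        time.sleep(interval)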
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1179.258650] env[61868]: DEBUG nova.network.neutron [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Updating instance_info_cache with network_info: [{"id": "a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e", "address": "fa:16:3e:47:4c:55", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7f28a4a-d2", "ovs_interfaceid": "a7f28a4a-d2e3-4b3e-9c7d-d7ceaf846b2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.269567] env[61868]: DEBUG oslo_concurrency.lockutils [req-dbc7e62c-f952-4f87-b6b6-f4086cce0a5e req-8290467c-dc6e-4fc2-8d72-83cca6abd9d6 service nova] Releasing lock "refresh_cache-8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1179.490323] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41058, 'name': CreateVM_Task, 'duration_secs': 0.303087} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.490504] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1179.491145] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1179.491385] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1179.494265] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8766a6b4-d08f-4811-a5aa-0397e01824d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.528274] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Reconfiguring VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1179.528676] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0b0aa78-84df-438b-88c7-df5a0b166b0a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.546443] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1179.546443] env[61868]: value = "task-41059" [ 1179.546443] env[61868]: _type = "Task" [ 1179.546443] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.556575] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41059, 'name': ReconfigVM_Task} progress is 6%. 
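Entries like the Acquiring/acquired/released triple for "vmware.get_and_set_vnc_port" (waited 0.000s, held 0.566s) come from oslo.concurrency's named-lock wrapper. A simplified sketch of that waited/held bookkeeping, assuming plain in-process threading locks rather than oslo's fair and external lock options:

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name):
    # Look up (or lazily create) the process-wide lock for this name.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired :: waited {time.monotonic() - start:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - held_from:.3f}s')

# Usage, matching the log above:
#   with named_lock("vmware.get_and_set_vnc_port"):
#       ...reconfigure the VM's VNC port...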
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.719232] env[61868]: DEBUG oslo_concurrency.lockutils [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1180.057049] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41059, 'name': ReconfigVM_Task, 'duration_secs': 0.11505} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.057354] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Reconfigured VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1180.057568] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.566s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1180.057829] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1180.057979] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1180.058329] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1180.058665] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da99a263-c570-4652-928b-702317711067 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.064843] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: 
(returnval){ [ 1180.064843] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52b565db-e3d7-eb7c-7d76-160fd8e2ba4e" [ 1180.064843] env[61868]: _type = "Task" [ 1180.064843] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.074385] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52b565db-e3d7-eb7c-7d76-160fd8e2ba4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.575514] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1180.575870] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1180.575992] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1184.399853] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.352482] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.352827] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1188.352827] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1188.374005] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.374195] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.374299] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.374423] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.374549] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.374670] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.374791] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.374911] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.375030] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.375151] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1188.375269] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
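The ComputeManager._heal_instance_info_cache and _poll_* entries in this stretch are oslo.service periodic tasks: methods registered with an interval and driven by the service's timer loop. A bare-bones sketch of that dispatch loop (illustrative only, not oslo.service's actual implementation):

import time

def run_periodic_tasks(tasks, tick=1.0):
    # tasks: mapping of name -> (interval_seconds, callable).
    # Each iteration runs every task whose interval has elapsed, producing
    # lines like "Running periodic task ComputeManager._..." above.
    last_run = {name: 0.0 for name in tasks}
    while True:
        now = time.monotonic()
        for name, (interval, fn) in tasks.items():
            if now - last_run[name] >= interval:
                print(f"Running periodic task {name}")
                fn()
                last_run[name] = now
        time.sleep(tick)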
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1188.375811] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.351088] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.346923] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.351368] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.351029] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.362424] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1191.362424] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1191.362424] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1191.362644] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1191.363710] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7070eba6-df4c-4373-a231-3eafaa888fbf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.372908] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23a42b8-c9ca-4aa4-b1a5-13cf41852f68 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.387203] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb18288-6a22-466b-b559-810534db28b7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.394780] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d565d0c9-92ea-418d-a3d7-b8402dab2ccb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.429019] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181935MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1191.429199] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1191.429379] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1191.496935] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1df194bf-fa9b-4d03-9b20-8478147de566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497104] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497228] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497347] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497465] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497577] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497690] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497810] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.497950] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.498063] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1191.509926] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 6ff71cf5-4473-4e25-b6f0-b3da104ed9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.521941] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d59d2416-ac39-4e06-a20c-b6f392da4af2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.532253] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efbd7994-e03a-40ab-978d-316667f3e43c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.542587] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance bbbde7a6-3107-46b9-b2c9-a4873916f7b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.551829] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 652af8bc-d4b9-4a5a-bba5-7429e659133b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.562102] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 16eb032d-fe34-4a46-883c-8b937806d63f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.571984] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 649d7eda-b095-4bb0-962a-acb8dfa50516 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.582378] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 5c20c9b3-467c-4c82-9a30-883a4cd62e8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.592892] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.602749] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e44bdea6-05c1-43c9-b019-d762df3a6451 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.612844] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 17b8aca6-ebe8-4a10-b724-2fa03d991d0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.623102] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4618de15-8f2c-4165-8f23-a4a5542f3d0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.633735] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 31f60d0d-900f-4034-b954-00a219e223e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.645152] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.654929] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 05e4476d-5e31-4152-8db9-f24db047eb76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.666170] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.676885] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
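As a cross-check on this audit: ten instances are actively managed on the host, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, while the scheduled-but-not-yet-started instances are skipped. With the 512 MB memory reservation from the inventory, that gives used_ram = 512 + 10 × 128 = 1792 MB, used_disk = 10 × 1 = 10 GB, and used_vcpus = 10 of 48, which matches the "Final resource view" reported just below.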
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.689614] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1191.689861] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1191.690008] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1192.046535] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3248c040-6a70-4016-a835-f1971aabca65 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.055035] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5655b647-edd6-4fec-bd7b-fc6ceda72e33 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.088606] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb960bf-6e3b-48a0-bceb-4d558570dd09 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.098167] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92cfb59-8cd1-4496-a700-65fa8f27d813 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.113201] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.122481] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1192.140318] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1192.140512] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.711s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1194.141507] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.141824] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1194.352465] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.297082] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1199.034186] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1218.710158] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1218.710445] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1224.294729] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1224.295212] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1224.911107] env[61868]: WARNING oslo_vmware.rw_handles [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1224.911107] env[61868]: ERROR oslo_vmware.rw_handles [ 1224.911777] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1224.913489] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1224.913752] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Copying Virtual Disk [datastore2] vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/e8af76df-0328-41de-96a9-012da2f54854/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1224.914049] env[61868]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d5394f8-cc50-439c-977c-43fc78c39207 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.923510] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Waiting for the task: (returnval){ [ 1224.923510] env[61868]: value = "task-41060" [ 1224.923510] env[61868]: _type = "Task" [ 1224.923510] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.931382] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Task: {'id': task-41060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.435502] env[61868]: DEBUG oslo_vmware.exceptions [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1225.435842] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1225.436391] env[61868]: ERROR nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1225.436391] env[61868]: Faults: ['InvalidArgument'] [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Traceback (most recent call last): [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] yield resources [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self.driver.spawn(context, instance, image_meta, [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in 
spawn [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self._fetch_image_if_missing(context, vi) [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] image_cache(vi, tmp_image_ds_loc) [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] vm_util.copy_virtual_disk( [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] session._wait_for_task(vmdk_copy_task) [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] return self.wait_for_task(task_ref) [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] return evt.wait() [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] result = hub.switch() [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] return self.greenlet.switch() [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self.f(*self.args, **self.kw) [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] raise exceptions.translate_fault(task_info.error) [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Faults: ['InvalidArgument'] [ 1225.436391] env[61868]: ERROR nova.compute.manager [instance: 
1df194bf-fa9b-4d03-9b20-8478147de566] [ 1225.437496] env[61868]: INFO nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Terminating instance [ 1225.438698] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1225.438956] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.439589] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1225.439773] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1225.440063] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5931254-5ec1-4a16-b88c-245e92281a24 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.442697] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d12c317-d558-4dc3-a4e2-a1da83280aa3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.450372] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1225.450631] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18cd0ae7-095b-4fa6-a1ef-a1dbe67830a5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.453039] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.453153] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1225.454158] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53d0d743-3877-4019-9459-9811552a6025 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.459202] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Waiting for the task: (returnval){ [ 1225.459202] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52b162ab-11e6-7d51-ee4d-e15a7f235c53" [ 1225.459202] env[61868]: _type = "Task" [ 1225.459202] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.467142] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52b162ab-11e6-7d51-ee4d-e15a7f235c53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.521355] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1225.521582] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1225.521780] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Deleting the datastore file [datastore2] 1df194bf-fa9b-4d03-9b20-8478147de566 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1225.522077] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db772c68-bb13-4480-9b62-3dbb13cb0b4e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.530409] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Waiting for the task: (returnval){ [ 1225.530409] env[61868]: value = "task-41062" [ 1225.530409] env[61868]: _type = "Task" [ 1225.530409] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.540669] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Task: {'id': task-41062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.970393] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1225.970658] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Creating directory with path [datastore2] vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.971195] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53db393e-8c2c-4d61-a967-4afeceec56af {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.986811] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Created directory with path [datastore2] vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.987045] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Fetch image to [datastore2] vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1225.987208] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1225.987986] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970f2710-426f-4e0a-a1f8-9a8dce2dd3b6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.995428] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a7e844-2c77-4593-8125-4e386f48f7b1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.005185] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84815d96-7f95-4da1-962e-c046b66866ad {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.040938] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-51f22edb-ee46-4227-8fd8-5fed7e4b7464 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.049780] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a969f192-ee31-45ee-96d6-4812b96529ba {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.051539] env[61868]: DEBUG oslo_vmware.api [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Task: {'id': task-41062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078481} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.051793] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.051985] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1226.052170] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1226.052341] env[61868]: INFO nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Took 0.61 seconds to destroy the instance on the hypervisor. 
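The traceback earlier in this run shows the failure path end to end: `copy_virtual_disk` submits a disk-copy task, `session._wait_for_task` parks the greenthread, and `_poll_task` converts the server-side `InvalidArgument` fault on `fileType` into a `VimFaultException` that unwinds through `spawn()`. A minimal sketch of that poll-and-translate pattern follows; `get_task_info`, the attribute names, and `VimFaultError` are simplified stand-ins for illustration, not the real oslo_vmware API.

```python
import time


class VimFaultError(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it finishes, translating faults to exceptions.

    get_task_info is any callable returning an object with .state,
    .fault_name and .error_message attributes (an assumed shape, not
    the vSphere TaskInfo type); the real code polls via a looping
    call on an eventlet hub rather than time.sleep().
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # The server-side fault becomes a client-side exception,
            # which is why a bad fileType parameter on the vCenter
            # side surfaces as a raise inside the compute spawn path.
            raise VimFaultError([info.fault_name], info.error_message)
        time.sleep(poll_interval)
```

Under this model, callers such as the disk-copy helper never inspect task state themselves; they call the wait helper and let the translated exception propagate, which matches the single `VimFaultException` seen at every frame of the traceback.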
[ 1226.054489] env[61868]: DEBUG nova.compute.claims [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1226.054664] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1226.055062] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1226.073524] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1226.307908] env[61868]: DEBUG oslo_vmware.rw_handles [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1226.366594] env[61868]: DEBUG oslo_vmware.rw_handles [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1226.366823] env[61868]: DEBUG oslo_vmware.rw_handles [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1226.483187] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663c2a78-d329-4de6-9fb6-5a3dc7e29991 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.491106] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726ecf4f-8a2d-435e-a0ca-18023de348e1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.521858] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e20e444-0499-4cd2-99b9-fa9edd7f1ff1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.529622] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e0058f-9e39-489d-a733-5f8da70bb2ea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.544405] env[61868]: DEBUG nova.compute.provider_tree [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.553442] env[61868]: DEBUG nova.scheduler.client.report [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1226.570239] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.515s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1226.570938] env[61868]: ERROR nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1226.570938] env[61868]: Faults: ['InvalidArgument'] [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Traceback (most recent call last): [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1226.570938] env[61868]: ERROR nova.compute.manager 
[instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self.driver.spawn(context, instance, image_meta, [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self._fetch_image_if_missing(context, vi) [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] image_cache(vi, tmp_image_ds_loc) [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] vm_util.copy_virtual_disk( [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] session._wait_for_task(vmdk_copy_task) [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] return self.wait_for_task(task_ref) [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] return evt.wait() [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] result = hub.switch() [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] return self.greenlet.switch() [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] self.f(*self.args, **self.kw) [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] raise exceptions.translate_fault(task_info.error) [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Faults: ['InvalidArgument'] [ 1226.570938] env[61868]: ERROR nova.compute.manager [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] [ 1226.572226] env[61868]: DEBUG nova.compute.utils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1226.573745] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Build of instance 1df194bf-fa9b-4d03-9b20-8478147de566 was re-scheduled: A specified parameter was not correct: fileType [ 1226.573745] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1226.574180] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1226.574420] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1226.574645] env[61868]: DEBUG nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1226.574873] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1226.942301] env[61868]: DEBUG nova.network.neutron [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.956372] env[61868]: INFO nova.compute.manager [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Took 0.38 seconds to deallocate network for instance. [ 1227.066062] env[61868]: INFO nova.scheduler.client.report [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Deleted allocations for instance 1df194bf-fa9b-4d03-9b20-8478147de566 [ 1227.095129] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c57f9007-b228-4bad-ba10-81852dc5e28f tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "1df194bf-fa9b-4d03-9b20-8478147de566" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.215s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.096349] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "1df194bf-fa9b-4d03-9b20-8478147de566" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 417.578s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1227.096570] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Acquiring lock "1df194bf-fa9b-4d03-9b20-8478147de566-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1227.096947] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "1df194bf-fa9b-4d03-9b20-8478147de566-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1227.096947] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "1df194bf-fa9b-4d03-9b20-8478147de566-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.098885] env[61868]: INFO nova.compute.manager [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Terminating instance [ 1227.100494] env[61868]: DEBUG nova.compute.manager [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1227.100686] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1227.101203] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f3a5c04-685a-40dc-a17c-b176be187151 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.111300] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06de597-4bb9-45c0-8261-7af0a858d188 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.122248] env[61868]: DEBUG nova.compute.manager [None req-5793acf2-2a6e-4aa6-865a-b29e18e7c107 tempest-ServersAaction247Test-821762318 tempest-ServersAaction247Test-821762318-project-member] [instance: 6ff71cf5-4473-4e25-b6f0-b3da104ed9ff] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.147820] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1df194bf-fa9b-4d03-9b20-8478147de566 could not be found. [ 1227.148037] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1227.148221] env[61868]: INFO nova.compute.manager [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Took 0.05 seconds to destroy the instance on the hypervisor. 
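The second terminate above is instructive: the VM was already destroyed during the re-schedule, so the backend lookup fails, the driver logs "Instance does not exist on backend" as a WARNING rather than an error, and the destroy completes in 0.05 seconds anyway. A rough sketch of that idempotent-delete control flow, where `backend`, `unregister`, and `delete_files` are hypothetical hooks standing in for the vmwareapi calls:

```python
class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(backend, instance_uuid, log):
    """Destroy a VM, treating 'already gone' as success.

    The point is the control flow: a missing VM downgrades to a
    warning, and the surrounding cleanup (network deallocation,
    resource bookkeeping) runs either way, which is what makes
    repeated terminate requests safe.
    """
    try:
        backend.unregister(instance_uuid)
        backend.delete_files(instance_uuid)
    except InstanceNotFound:
        log.warning("Instance does not exist on backend: %s",
                    instance_uuid)
    # Cleanup continues unconditionally from here.
```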
[ 1227.148469] env[61868]: DEBUG oslo.service.loopingcall [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.148704] env[61868]: DEBUG nova.compute.manager [-] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1227.148817] env[61868]: DEBUG nova.network.neutron [-] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1227.155855] env[61868]: DEBUG nova.compute.manager [None req-5793acf2-2a6e-4aa6-865a-b29e18e7c107 tempest-ServersAaction247Test-821762318 tempest-ServersAaction247Test-821762318-project-member] [instance: 6ff71cf5-4473-4e25-b6f0-b3da104ed9ff] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.184972] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5793acf2-2a6e-4aa6-865a-b29e18e7c107 tempest-ServersAaction247Test-821762318 tempest-ServersAaction247Test-821762318-project-member] Lock "6ff71cf5-4473-4e25-b6f0-b3da104ed9ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.349s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.186909] env[61868]: DEBUG nova.network.neutron [-] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.198585] env[61868]: INFO nova.compute.manager [-] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] Took 0.05 seconds to deallocate network for instance. [ 1227.204199] env[61868]: DEBUG nova.compute.manager [None req-83b9e23e-76e1-4b71-acf0-313c6d45d1c0 tempest-ServerRescueTestJSON-931646452 tempest-ServerRescueTestJSON-931646452-project-member] [instance: d59d2416-ac39-4e06-a20c-b6f392da4af2] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.232256] env[61868]: DEBUG nova.compute.manager [None req-83b9e23e-76e1-4b71-acf0-313c6d45d1c0 tempest-ServerRescueTestJSON-931646452 tempest-ServerRescueTestJSON-931646452-project-member] [instance: d59d2416-ac39-4e06-a20c-b6f392da4af2] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.256946] env[61868]: DEBUG oslo_concurrency.lockutils [None req-83b9e23e-76e1-4b71-acf0-313c6d45d1c0 tempest-ServerRescueTestJSON-931646452 tempest-ServerRescueTestJSON-931646452-project-member] Lock "d59d2416-ac39-4e06-a20c-b6f392da4af2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.544s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.273101] env[61868]: DEBUG nova.compute.manager [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] [instance: efbd7994-e03a-40ab-978d-316667f3e43c] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.299583] env[61868]: DEBUG nova.compute.manager [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] [instance: efbd7994-e03a-40ab-978d-316667f3e43c] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.306345] env[61868]: DEBUG oslo_concurrency.lockutils [None req-4411f1f3-36e7-4c94-a9a3-cda4b418b6ef tempest-ServerGroupTestJSON-1144577638 tempest-ServerGroupTestJSON-1144577638-project-member] Lock "1df194bf-fa9b-4d03-9b20-8478147de566" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.307822] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "1df194bf-fa9b-4d03-9b20-8478147de566" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 77.649s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1227.308023] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1df194bf-fa9b-4d03-9b20-8478147de566] During sync_power_state the instance has a pending task (deleting). Skip. 
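The `_sync_power_states` periodic task waited 77 seconds for the instance lock and then skipped the instance because a delete was still in flight. The guard is simple: any pending task_state means another operation owns the instance, so the power-state sync must not touch it. A simplified model of that check (the attribute names follow the log, the rest is illustrative):

```python
def query_driver_power_state_and_sync(instance, driver_power_state, log):
    """Reconcile DB power state with the hypervisor, unless busy.

    A periodic reconciler must not fight an in-flight operation
    (here, a delete), so a non-None task_state short-circuits it.
    """
    if instance.task_state is not None:
        log.info("During sync_power_state the instance has a pending "
                 "task (%s). Skip.", instance.task_state)
        return
    if instance.power_state != driver_power_state:
        # Only now is it safe to overwrite the recorded state.
        instance.power_state = driver_power_state
        instance.save()
```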
[ 1227.308203] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "1df194bf-fa9b-4d03-9b20-8478147de566" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.326891] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Lock "efbd7994-e03a-40ab-978d-316667f3e43c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.216s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.339596] env[61868]: DEBUG nova.compute.manager [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] [instance: bbbde7a6-3107-46b9-b2c9-a4873916f7b6] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.368844] env[61868]: DEBUG nova.compute.manager [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] [instance: bbbde7a6-3107-46b9-b2c9-a4873916f7b6] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.394627] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Lock "bbbde7a6-3107-46b9-b2c9-a4873916f7b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.242s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.407281] env[61868]: DEBUG nova.compute.manager [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] [instance: 652af8bc-d4b9-4a5a-bba5-7429e659133b] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.442506] env[61868]: DEBUG nova.compute.manager [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] [instance: 652af8bc-d4b9-4a5a-bba5-7429e659133b] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.465811] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d86bf205-ddd3-447f-89d4-aff3668a2089 tempest-ListServersNegativeTestJSON-1840204747 tempest-ListServersNegativeTestJSON-1840204747-project-member] Lock "652af8bc-d4b9-4a5a-bba5-7429e659133b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.251s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1227.484045] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.539102] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1227.539409] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1227.540984] env[61868]: INFO nova.compute.claims [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.946818] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2909dec-1051-4655-978f-7e4f8ed06d80 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.956710] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8b405c-41c0-4315-934f-0cc0a8256aaf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.989054] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ed6b27-ae96-48f1-b00a-cbdd6a5813b2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.996932] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f72a79-85fd-46ab-80c0-09de3f474a9e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.010593] env[61868]: DEBUG nova.compute.provider_tree [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.019346] env[61868]: DEBUG nova.scheduler.client.report [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1228.037386] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.498s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1228.037756] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1228.076838] env[61868]: DEBUG nova.compute.utils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1228.078613] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1228.079409] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1228.092185] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1228.128998] env[61868]: INFO nova.virt.block_device [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Booting with volume 6b172ac4-941c-4444-aef5-60dc17d4ad99 at /dev/sda [ 1228.133036] env[61868]: DEBUG nova.policy [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09868a3d825645c3b2d493561dd3b2f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c6829541f0f4e4d83b65ddd3f7e33ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1228.163073] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e778139-8478-44db-9ff4-397cc218958e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.172931] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9bd02a-d41f-46c7-a0d8-ed7a7d6efecf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.202931] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46c3c7f1-34d7-4f62-95fa-cc61c3a12268 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.211511] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b0ba30-9a81-44db-81bf-68c07f744c47 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.238857] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad2ebee-0141-4a3f-b055-42a3d4e5f1e1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.246853] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a882869-56da-4c94-a910-4d7102e7ac49 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.262714] env[61868]: DEBUG nova.virt.block_device [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Updating existing volume attachment record: 94c3e455-6300-4b13-afdf-c8ceeaa60f50 {{(pid=61868) _volume_attach /opt/stack/nova/nova/virt/block_device.py:631}} [ 1228.505500] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Successfully created port: 7fa12cc5-b014-48e3-aece-c2095df78b6c {{(pid=61868) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1228.516082] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1228.516569] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1228.516758] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1228.516897] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.517075] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1228.517217] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.517356] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1228.517586] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1228.517758] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 
tempest-ServersTestBootFromVolume-333976937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1228.517972] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1228.518132] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1228.518296] env[61868]: DEBUG nova.virt.hardware [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1228.519441] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9a9c1d-2fe1-4cff-b848-761842e128f4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.529698] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40db232-c72c-4c39-9c70-8de4309e8e17 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.849984] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock "16eb032d-fe34-4a46-883c-8b937806d63f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1229.102997] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Successfully updated port: 7fa12cc5-b014-48e3-aece-c2095df78b6c {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.113264] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock "refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1229.113420] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquired lock "refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1229.113565] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 
tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1229.157770] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1229.278263] env[61868]: DEBUG nova.compute.manager [req-8e8e84ce-04ab-487b-95f8-253ca13c489e req-9fd0b80b-0e77-499f-8963-2fdb60d11773 service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Received event network-vif-plugged-7fa12cc5-b014-48e3-aece-c2095df78b6c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1229.278482] env[61868]: DEBUG oslo_concurrency.lockutils [req-8e8e84ce-04ab-487b-95f8-253ca13c489e req-9fd0b80b-0e77-499f-8963-2fdb60d11773 service nova] Acquiring lock "16eb032d-fe34-4a46-883c-8b937806d63f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1229.278693] env[61868]: DEBUG oslo_concurrency.lockutils [req-8e8e84ce-04ab-487b-95f8-253ca13c489e req-9fd0b80b-0e77-499f-8963-2fdb60d11773 service nova] Lock "16eb032d-fe34-4a46-883c-8b937806d63f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1229.279077] env[61868]: DEBUG oslo_concurrency.lockutils [req-8e8e84ce-04ab-487b-95f8-253ca13c489e req-9fd0b80b-0e77-499f-8963-2fdb60d11773 service nova] Lock "16eb032d-fe34-4a46-883c-8b937806d63f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1229.279131] env[61868]: DEBUG nova.compute.manager [req-8e8e84ce-04ab-487b-95f8-253ca13c489e req-9fd0b80b-0e77-499f-8963-2fdb60d11773 service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] No waiting events found dispatching network-vif-plugged-7fa12cc5-b014-48e3-aece-c2095df78b6c {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1229.279315] env[61868]: WARNING nova.compute.manager [req-8e8e84ce-04ab-487b-95f8-253ca13c489e req-9fd0b80b-0e77-499f-8963-2fdb60d11773 service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Received unexpected event network-vif-plugged-7fa12cc5-b014-48e3-aece-c2095df78b6c for instance with vm_state building and task_state deleting. 
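The "Received unexpected event network-vif-plugged-..." WARNING above is the event-dispatch path finding no registered waiter: the neutron notification arrived while the instance was already in task_state deleting, so `pop_instance_event` came up empty and the event was dropped with a warning instead of waking a waiter. A condensed sketch of that waiter table, using `threading.Event` in place of the eventlet primitives and hypothetical method names:

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Simplified model of the table behind pop_instance_event."""

    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_name: Event waiting to be set}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the work."""
        done = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = done
        return done

    def dispatch(self, instance_uuid, event_name, log):
        """Deliver an external event to its waiter, if any."""
        with self._lock:
            done = self._waiters[instance_uuid].pop(event_name, None)
        if done is None:
            # Matches the WARNING in the log: the event arrived but
            # nothing was waiting (e.g. the instance is being deleted).
            log.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return
        done.set()
```

The ordering matters: the waiter must be registered before the port update is issued, otherwise the plug event can race ahead and be dropped exactly as seen here.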
[ 1229.345641] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Updating instance_info_cache with network_info: [{"id": "7fa12cc5-b014-48e3-aece-c2095df78b6c", "address": "fa:16:3e:0a:ab:03", "network": {"id": "32f49e33-75d0-47e2-b03f-a410ba437e66", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1806371306-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9c6829541f0f4e4d83b65ddd3f7e33ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": "nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fa12cc5-b0", "ovs_interfaceid": "7fa12cc5-b014-48e3-aece-c2095df78b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.364339] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Releasing lock "refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1229.364924] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance network_info: |[{"id": "7fa12cc5-b014-48e3-aece-c2095df78b6c", "address": "fa:16:3e:0a:ab:03", "network": {"id": "32f49e33-75d0-47e2-b03f-a410ba437e66", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1806371306-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9c6829541f0f4e4d83b65ddd3f7e33ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": "nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fa12cc5-b0", "ovs_interfaceid": "7fa12cc5-b014-48e3-aece-c2095df78b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1229.365869] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 
tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:ab:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7fa12cc5-b014-48e3-aece-c2095df78b6c', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1229.374156] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Creating folder: Project (9c6829541f0f4e4d83b65ddd3f7e33ef). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1229.374986] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3ee8176-1b5b-4416-85b9-3550f07ebd55 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.392137] env[61868]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1229.392557] env[61868]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61868) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1229.393038] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Folder already exists: Project (9c6829541f0f4e4d83b65ddd3f7e33ef). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1229.393461] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Creating folder: Instances. Parent ref: group-v18243. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1229.393839] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eee5a995-b56f-4daa-9dff-624ead1c30c2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.404083] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Created folder: Instances in parent group-v18243. [ 1229.404836] env[61868]: DEBUG oslo.service.loopingcall [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1229.405521] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1229.405928] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5069c12-51ee-4801-8a62-1583d6922db5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.429933] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1229.429933] env[61868]: value = "task-41065" [ 1229.429933] env[61868]: _type = "Task" [ 1229.429933] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.438891] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41065, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.941763] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41065, 'name': CreateVM_Task, 'duration_secs': 0.30965} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.942077] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1229.948825] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1229.949072] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1229.951993] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15557ed-8b82-4e60-9e87-5a4921cbf792 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.985897] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Reconfiguring VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1229.986276] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b08f7d54-d9f6-4af5-86a0-839ecbaa0490 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.002329] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 
tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1230.002329] env[61868]: value = "task-41066" [ 1230.002329] env[61868]: _type = "Task" [ 1230.002329] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.010478] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41066, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.513023] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41066, 'name': ReconfigVM_Task, 'duration_secs': 0.119499} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.513338] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Reconfigured VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1230.513552] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.564s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1230.513824] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'guest_format': None, 'attachment_id': '94c3e455-6300-4b13-afdf-c8ceeaa60f50', 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-18246', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'name': 'volume-6b172ac4-941c-4444-aef5-60dc17d4ad99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '16eb032d-fe34-4a46-883c-8b937806d63f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'serial': '6b172ac4-941c-4444-aef5-60dc17d4ad99'}, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=61868) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1230.514036] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Root volume attach. 
Driver type: vmdk {{(pid=61868) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1230.514819] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc1d925-602d-4a5a-b23a-1a4271702a28 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.523053] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e61a83-9442-429a-b082-0bee076a33b2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.529202] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40edc323-532d-4431-b799-8fb7651fdc08 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.535663] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2b1a30e7-dd3a-4aa5-9140-9b287580d782 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.542833] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1230.542833] env[61868]: value = "task-41067" [ 1230.542833] env[61868]: _type = "Task" [ 1230.542833] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.551418] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.054017] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task} progress is 40%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.309088] env[61868]: DEBUG nova.compute.manager [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Received event network-changed-7fa12cc5-b014-48e3-aece-c2095df78b6c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1231.309341] env[61868]: DEBUG nova.compute.manager [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Refreshing instance network info cache due to event network-changed-7fa12cc5-b014-48e3-aece-c2095df78b6c. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1231.309525] env[61868]: DEBUG oslo_concurrency.lockutils [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] Acquiring lock "refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1231.309661] env[61868]: DEBUG oslo_concurrency.lockutils [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] Acquired lock "refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1231.309871] env[61868]: DEBUG nova.network.neutron [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Refreshing network info cache for port 7fa12cc5-b014-48e3-aece-c2095df78b6c {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1231.555984] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task} progress is 53%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.788555] env[61868]: DEBUG nova.network.neutron [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Updated VIF entry in instance network info cache for port 7fa12cc5-b014-48e3-aece-c2095df78b6c. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1231.789035] env[61868]: DEBUG nova.network.neutron [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Updating instance_info_cache with network_info: [{"id": "7fa12cc5-b014-48e3-aece-c2095df78b6c", "address": "fa:16:3e:0a:ab:03", "network": {"id": "32f49e33-75d0-47e2-b03f-a410ba437e66", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1806371306-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9c6829541f0f4e4d83b65ddd3f7e33ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac4015e0-e5e7-4b3f-8d8e-ef4501eea9aa", "external-id": "nsx-vlan-transportzone-132", "segmentation_id": 132, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fa12cc5-b0", "ovs_interfaceid": "7fa12cc5-b014-48e3-aece-c2095df78b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.801577] env[61868]: DEBUG oslo_concurrency.lockutils [req-e26e7fb2-55c2-4e7f-8383-edf47a5e0bcd req-932200b0-d16e-4e30-8f93-68eefc1057da service nova] Releasing lock 
"refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1232.055585] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task} progress is 67%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.558128] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task} progress is 82%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.056192] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task} progress is 97%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.556094] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task} progress is 98%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.057379] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41067, 'name': RelocateVM_Task, 'duration_secs': 3.15112} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.057690] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Volume attach. 
Driver type: vmdk {{(pid=61868) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1234.057875] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-18246', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'name': 'volume-6b172ac4-941c-4444-aef5-60dc17d4ad99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '16eb032d-fe34-4a46-883c-8b937806d63f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'serial': '6b172ac4-941c-4444-aef5-60dc17d4ad99'} {{(pid=61868) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1234.058647] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a60d65-cd52-482c-ac75-8b3e22d2decd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.074526] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c14a342-4a3a-4128-9207-053cf98cc312 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.084831] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25933dd-197e-4be0-a2c6-63db29b4b001 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.102737] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] volume-6b172ac4-941c-4444-aef5-60dc17d4ad99/volume-6b172ac4-941c-4444-aef5-60dc17d4ad99.vmdk or device None with type thin {{(pid=61868) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1234.103238] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-959063be-69a5-4e7e-821d-2945ba897831 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.121724] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1234.121724] env[61868]: value = "task-41068" [ 1234.121724] env[61868]: _type = "Task" [ 1234.121724] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.130950] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41068, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.631755] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41068, 'name': ReconfigVM_Task, 'duration_secs': 0.282487} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.632099] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Reconfigured VM instance instance-0000003d to attach disk [datastore2] volume-6b172ac4-941c-4444-aef5-60dc17d4ad99/volume-6b172ac4-941c-4444-aef5-60dc17d4ad99.vmdk or device None with type thin {{(pid=61868) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1234.636657] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-498d9032-2ecd-4942-8275-bf1af3c06f1d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.652165] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1234.652165] env[61868]: value = "task-41069" [ 1234.652165] env[61868]: _type = "Task" [ 1234.652165] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.660630] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41069, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.986012] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1234.986253] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1235.162594] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41069, 'name': ReconfigVM_Task, 'duration_secs': 0.145071} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.162908] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-18246', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'name': 'volume-6b172ac4-941c-4444-aef5-60dc17d4ad99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '16eb032d-fe34-4a46-883c-8b937806d63f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'serial': '6b172ac4-941c-4444-aef5-60dc17d4ad99'} {{(pid=61868) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1235.163561] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1ab5e1e-8c97-48db-8a5a-2c331ccf82dc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.170876] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1235.170876] env[61868]: value = "task-41070" [ 1235.170876] env[61868]: _type = "Task" [ 1235.170876] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.179657] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41070, 'name': Rename_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.681202] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41070, 'name': Rename_Task, 'duration_secs': 0.134846} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.682075] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Powering on the VM {{(pid=61868) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1235.682378] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd7e1571-35c0-4897-b187-147d32c519d5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.689614] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1235.689614] env[61868]: value = "task-41071" [ 1235.689614] env[61868]: _type = "Task" [ 1235.689614] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.699483] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41071, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.200360] env[61868]: DEBUG oslo_vmware.api [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41071, 'name': PowerOnVM_Task, 'duration_secs': 0.431697} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.201245] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Powered on the VM {{(pid=61868) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1236.201634] env[61868]: INFO nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Took 7.69 seconds to spawn the instance on the hypervisor. [ 1236.202021] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Checking state {{(pid=61868) _get_power_state /opt/stack/nova/nova/compute/manager.py:1782}} [ 1236.202916] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc17b9c4-a0f6-4fd5-9b45-8999d420c865 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.255527] env[61868]: DEBUG nova.compute.utils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Conflict updating instance 16eb032d-fe34-4a46-883c-8b937806d63f. Expected: {'task_state': ['spawning']}. Actual: {'task_state': 'deleting'} {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1236.257706] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance disappeared during build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1236.257915] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1236.258132] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1236.258340] env[61868]: DEBUG nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1236.258539] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1236.556992] env[61868]: DEBUG nova.network.neutron [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.575112] env[61868]: INFO nova.compute.manager [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Took 0.32 seconds to deallocate network for instance. 
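Every "Waiting for the task" / "progress is N%" / "completed successfully" triple in this section (CreateVM_Task, ReconfigVM_Task, RelocateVM_Task, Rename_Task, PowerOnVM_Task) comes from the same polling pattern: a vCenter task is submitted and its state is polled on a fixed interval until it reaches success or error. Below is a generic, self-contained sketch of such a loop under stated assumptions; the fetch_task_info callable and TaskFailed exception are illustrative inventions, not the real oslo.vmware API, and only the function name wait_for_task echoes the log.

    # Generic vCenter-style task polling loop (illustrative sketch only;
    # fetch_task_info and TaskFailed are assumptions, not oslo.vmware).
    import time

    class TaskFailed(Exception):
        """Raised when the backend reports the task in an error state."""

    def wait_for_task(fetch_task_info, task_id, interval=0.5):
        """Poll task state until success or error, logging progress."""
        while True:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                print('Task %s completed successfully' % task_id)
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            print('Task %s progress is %s%%'
                  % (task_id, info.get('progress', 0)))
            time.sleep(interval)

    # Demo: three polls ending in success, mirroring the shape of the
    # RelocateVM_Task entries above (progress values are made up here).
    states = iter([{'state': 'running', 'progress': 40},
                   {'state': 'running', 'progress': 82},
                   {'state': 'success', 'progress': 100}])
    wait_for_task(lambda task_id: next(states), 'task-41067', interval=0.01)

Read this way, the RelocateVM_Task entries at 40%, 53%, 67%, 82%, 97% and 98% before the 3.15s completion are simply successive iterations of one such loop, one log line per polling interval.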
[ 1236.586076] env[61868]: DEBUG nova.compute.manager [req-a6e646c9-1c8d-48a9-9245-33a4837eb08e req-bf5ba343-71ce-47c2-b34a-24e734813d8c service nova] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Received event network-vif-deleted-7fa12cc5-b014-48e3-aece-c2095df78b6c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1236.656823] env[61868]: INFO nova.scheduler.client.report [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Deleted allocations for instance 16eb032d-fe34-4a46-883c-8b937806d63f [ 1236.657246] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ca313414-8c7b-4c9c-b00d-4ff389a82292 tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "16eb032d-fe34-4a46-883c-8b937806d63f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.098s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1236.658340] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "16eb032d-fe34-4a46-883c-8b937806d63f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 7.809s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1236.658576] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock "16eb032d-fe34-4a46-883c-8b937806d63f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1236.658760] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "16eb032d-fe34-4a46-883c-8b937806d63f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1236.658918] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "16eb032d-fe34-4a46-883c-8b937806d63f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1236.660733] env[61868]: INFO nova.compute.manager [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Terminating instance [ 1236.662570] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock 
"refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1236.662692] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquired lock "refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1236.662858] env[61868]: DEBUG nova.network.neutron [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1236.669272] env[61868]: DEBUG nova.compute.manager [None req-2ebced76-abaf-4ace-be06-2fdccdbd8ac5 tempest-ServerShowV257Test-1904292548 tempest-ServerShowV257Test-1904292548-project-member] [instance: 649d7eda-b095-4bb0-962a-acb8dfa50516] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1236.691970] env[61868]: DEBUG nova.network.neutron [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1236.695669] env[61868]: DEBUG nova.compute.manager [None req-2ebced76-abaf-4ace-be06-2fdccdbd8ac5 tempest-ServerShowV257Test-1904292548 tempest-ServerShowV257Test-1904292548-project-member] [instance: 649d7eda-b095-4bb0-962a-acb8dfa50516] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1236.718209] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2ebced76-abaf-4ace-be06-2fdccdbd8ac5 tempest-ServerShowV257Test-1904292548 tempest-ServerShowV257Test-1904292548-project-member] Lock "649d7eda-b095-4bb0-962a-acb8dfa50516" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.691s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1236.730087] env[61868]: DEBUG nova.compute.manager [None req-be7f336c-3c98-4711-8df2-ffc8de027400 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 5c20c9b3-467c-4c82-9a30-883a4cd62e8e] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1236.750393] env[61868]: DEBUG nova.network.neutron [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.758337] env[61868]: DEBUG nova.compute.manager [None req-be7f336c-3c98-4711-8df2-ffc8de027400 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 5c20c9b3-467c-4c82-9a30-883a4cd62e8e] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1236.761212] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Releasing lock "refresh_cache-16eb032d-fe34-4a46-883c-8b937806d63f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1236.761793] env[61868]: DEBUG nova.compute.manager [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1236.762075] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Powering off the VM {{(pid=61868) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1236.762575] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b49e14d5-4c4e-4fb4-92f6-79bd17c3a198 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.770242] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1236.770242] env[61868]: value = "task-41072" [ 1236.770242] env[61868]: _type = "Task" [ 1236.770242] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.781740] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41072, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.782744] env[61868]: DEBUG oslo_concurrency.lockutils [None req-be7f336c-3c98-4711-8df2-ffc8de027400 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "5c20c9b3-467c-4c82-9a30-883a4cd62e8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.519s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1236.796357] env[61868]: DEBUG nova.compute.manager [None req-1a007db2-064c-414a-8b4f-cafe6f4cdcf2 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] [instance: e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1236.834626] env[61868]: DEBUG nova.compute.manager [None req-1a007db2-064c-414a-8b4f-cafe6f4cdcf2 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] [instance: e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1236.859936] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a007db2-064c-414a-8b4f-cafe6f4cdcf2 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Lock "e0c5de36-1d7f-49a0-a13e-faf0e8ae81a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.875s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1236.872949] env[61868]: DEBUG nova.compute.manager [None req-c802b8dc-f5ba-41fc-9fa0-d8bbdc48893f tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] [instance: e44bdea6-05c1-43c9-b019-d762df3a6451] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1236.902654] env[61868]: DEBUG nova.compute.manager [None req-c802b8dc-f5ba-41fc-9fa0-d8bbdc48893f tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] [instance: e44bdea6-05c1-43c9-b019-d762df3a6451] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1236.930124] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c802b8dc-f5ba-41fc-9fa0-d8bbdc48893f tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Lock "e44bdea6-05c1-43c9-b019-d762df3a6451" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.287s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1236.944979] env[61868]: DEBUG nova.compute.manager [None req-ff01555b-0604-48be-b780-6caf3bc413a5 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] [instance: 17b8aca6-ebe8-4a10-b724-2fa03d991d0c] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1236.973339] env[61868]: DEBUG nova.compute.manager [None req-ff01555b-0604-48be-b780-6caf3bc413a5 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] [instance: 17b8aca6-ebe8-4a10-b724-2fa03d991d0c] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1236.996535] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ff01555b-0604-48be-b780-6caf3bc413a5 tempest-ListServerFiltersTestJSON-535461354 tempest-ListServerFiltersTestJSON-535461354-project-member] Lock "17b8aca6-ebe8-4a10-b724-2fa03d991d0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.715s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1237.008659] env[61868]: DEBUG nova.compute.manager [None req-455439a7-de1e-409b-927c-f866a30931e8 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 78dde060-c9e0-4f7d-a012-883b8a82b3b4] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1237.035127] env[61868]: DEBUG nova.compute.manager [None req-455439a7-de1e-409b-927c-f866a30931e8 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 78dde060-c9e0-4f7d-a012-883b8a82b3b4] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1237.059926] env[61868]: DEBUG oslo_concurrency.lockutils [None req-455439a7-de1e-409b-927c-f866a30931e8 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "78dde060-c9e0-4f7d-a012-883b8a82b3b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 184.433s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1237.072522] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1237.126220] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1237.126506] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1237.128014] env[61868]: INFO nova.compute.claims [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1237.284213] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41072, 'name': PowerOffVM_Task, 'duration_secs': 0.183762} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.284550] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Powered off the VM {{(pid=61868) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1237.284759] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Volume detach. 
Driver type: vmdk {{(pid=61868) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1237.284955] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-18246', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'name': 'volume-6b172ac4-941c-4444-aef5-60dc17d4ad99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '16eb032d-fe34-4a46-883c-8b937806d63f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'serial': '6b172ac4-941c-4444-aef5-60dc17d4ad99'} {{(pid=61868) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1237.285747] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406ea53e-5508-4793-8d66-6384dcf9b204 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.308924] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d410b1-ed3c-4027-b79b-6b11b4f35afd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.317162] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfe497c-99ae-45c3-89f5-a0f8b1ebe054 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.342377] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfd7fb8-508b-445c-83b2-166bf67f04eb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.349366] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad320fc5-b68f-4055-87a9-8e735d18e5b7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.364231] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] The volume has not been displaced from its original location: [datastore2] volume-6b172ac4-941c-4444-aef5-60dc17d4ad99/volume-6b172ac4-941c-4444-aef5-60dc17d4ad99.vmdk. No consolidation needed. 
{{(pid=61868) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1237.369464] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Reconfiguring VM instance instance-0000003d to detach disk 3000 {{(pid=61868) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1237.372110] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07804a0b-20dd-434e-a1e5-5554569eb998 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.391390] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1237.391390] env[61868]: value = "task-41073" [ 1237.391390] env[61868]: _type = "Task" [ 1237.391390] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.399969] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41073, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.500230] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c78467-bee3-4f48-8c03-8b0ab42aa620 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.508804] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb827109-d75e-4691-9b16-7b49839e5b52 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.541122] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9b3dff-6569-45dc-b5b2-675503ba1ee3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.549213] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1235d1-4351-4c88-bd22-9e8c2472b335 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.562507] env[61868]: DEBUG nova.compute.provider_tree [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.572851] env[61868]: DEBUG nova.scheduler.client.report [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1237.592143] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.465s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1237.592661] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1237.627362] env[61868]: DEBUG nova.compute.utils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1237.628762] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1237.628853] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1237.641584] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1237.684323] env[61868]: DEBUG nova.policy [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '424b508614194ac2ad15e8cb62f2d041', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f518980782c4dc5ac6efe31af19af16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1237.715511] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1237.740534] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1237.740813] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1237.740962] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.741148] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1237.741294] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.741440] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1237.741644] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1237.741824] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1237.742009] env[61868]: DEBUG nova.virt.hardware [None 
req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1237.742173] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1237.742347] env[61868]: DEBUG nova.virt.hardware [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1237.743200] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d44732-172a-498b-a94e-f276a45462c9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.751415] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9648302c-77f1-4d14-ae7b-25eba3606a48 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.902564] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41073, 'name': ReconfigVM_Task, 'duration_secs': 0.147183} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.902866] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Reconfigured VM instance instance-0000003d to detach disk 3000 {{(pid=61868) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1237.907453] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4325b881-0696-4aa9-b971-f704aad2478b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.923592] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1237.923592] env[61868]: value = "task-41074" [ 1237.923592] env[61868]: _type = "Task" [ 1237.923592] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.932660] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41074, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.973385] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Successfully created port: c1f25457-4838-4a36-ae2c-4a60c84796ef {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1238.434328] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41074, 'name': ReconfigVM_Task, 'duration_secs': 0.107213} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.434683] env[61868]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-18246', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'name': 'volume-6b172ac4-941c-4444-aef5-60dc17d4ad99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '16eb032d-fe34-4a46-883c-8b937806d63f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b172ac4-941c-4444-aef5-60dc17d4ad99', 'serial': '6b172ac4-941c-4444-aef5-60dc17d4ad99'} {{(pid=61868) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1238.435029] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1238.436026] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a43b3bf-783e-4550-b38f-7ccfd0ed5d47 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.443796] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1238.444085] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-159ff17c-4fca-42ac-b769-83222aeeda80 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.507111] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1238.507527] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 
tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1238.507741] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Deleting the datastore file [datastore2] 16eb032d-fe34-4a46-883c-8b937806d63f {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.508069] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b770620b-a893-4bc1-869b-8318263ca11f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.516742] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for the task: (returnval){ [ 1238.516742] env[61868]: value = "task-41076" [ 1238.516742] env[61868]: _type = "Task" [ 1238.516742] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.526849] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.543513] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Successfully updated port: c1f25457-4838-4a36-ae2c-4a60c84796ef {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1238.562502] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "refresh_cache-4618de15-8f2c-4165-8f23-a4a5542f3d0b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1238.562647] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "refresh_cache-4618de15-8f2c-4165-8f23-a4a5542f3d0b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1238.562795] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1238.611629] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1238.617395] env[61868]: DEBUG nova.compute.manager [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Received event network-vif-plugged-c1f25457-4838-4a36-ae2c-4a60c84796ef {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1238.617604] env[61868]: DEBUG oslo_concurrency.lockutils [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] Acquiring lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1238.617820] env[61868]: DEBUG oslo_concurrency.lockutils [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1238.617980] env[61868]: DEBUG oslo_concurrency.lockutils [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1238.618140] env[61868]: DEBUG nova.compute.manager [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] No waiting events found dispatching network-vif-plugged-c1f25457-4838-4a36-ae2c-4a60c84796ef {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1238.618300] env[61868]: WARNING nova.compute.manager [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Received unexpected event network-vif-plugged-c1f25457-4838-4a36-ae2c-4a60c84796ef for instance with vm_state building and task_state spawning. [ 1238.618451] env[61868]: DEBUG nova.compute.manager [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Received event network-changed-c1f25457-4838-4a36-ae2c-4a60c84796ef {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1238.618596] env[61868]: DEBUG nova.compute.manager [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Refreshing instance network info cache due to event network-changed-c1f25457-4838-4a36-ae2c-4a60c84796ef. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1238.618796] env[61868]: DEBUG oslo_concurrency.lockutils [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] Acquiring lock "refresh_cache-4618de15-8f2c-4165-8f23-a4a5542f3d0b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1238.785966] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Updating instance_info_cache with network_info: [{"id": "c1f25457-4838-4a36-ae2c-4a60c84796ef", "address": "fa:16:3e:28:94:65", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1f25457-48", "ovs_interfaceid": "c1f25457-4838-4a36-ae2c-4a60c84796ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.802789] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "refresh_cache-4618de15-8f2c-4165-8f23-a4a5542f3d0b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1238.803397] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Instance network_info: |[{"id": "c1f25457-4838-4a36-ae2c-4a60c84796ef", "address": "fa:16:3e:28:94:65", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1f25457-48", "ovs_interfaceid": 
"c1f25457-4838-4a36-ae2c-4a60c84796ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1238.803885] env[61868]: DEBUG oslo_concurrency.lockutils [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] Acquired lock "refresh_cache-4618de15-8f2c-4165-8f23-a4a5542f3d0b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1238.804212] env[61868]: DEBUG nova.network.neutron [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Refreshing network info cache for port c1f25457-4838-4a36-ae2c-4a60c84796ef {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1238.805687] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:94:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1f25457-4838-4a36-ae2c-4a60c84796ef', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1238.813372] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating folder: Project (6f518980782c4dc5ac6efe31af19af16). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1238.814591] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c32864c-ba49-4122-8ae0-71e7fcf8aae8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.829051] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created folder: Project (6f518980782c4dc5ac6efe31af19af16) in parent group-v18181. [ 1238.829270] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating folder: Instances. Parent ref: group-v18266. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1238.829520] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-972765b8-d373-498e-b8db-c4e449a13c36 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.841121] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created folder: Instances in parent group-v18266. 
[ 1238.841409] env[61868]: DEBUG oslo.service.loopingcall [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1238.841591] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1238.841990] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8195e616-783c-45ce-8dc7-f671776e25dc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.865264] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1238.865264] env[61868]: value = "task-41079" [ 1238.865264] env[61868]: _type = "Task" [ 1238.865264] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.875323] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41079, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.027929] env[61868]: DEBUG oslo_vmware.api [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Task: {'id': task-41076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083257} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.028246] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1239.028451] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1239.028682] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1239.028846] env[61868]: INFO nova.compute.manager [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1239.029119] env[61868]: DEBUG oslo.service.loopingcall [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1239.029337] env[61868]: DEBUG nova.compute.manager [-] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1239.029453] env[61868]: DEBUG nova.network.neutron [-] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1239.055961] env[61868]: DEBUG nova.network.neutron [-] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1239.064578] env[61868]: DEBUG nova.network.neutron [-] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.076618] env[61868]: INFO nova.compute.manager [-] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Took 0.05 seconds to deallocate network for instance. [ 1239.086875] env[61868]: DEBUG nova.network.neutron [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Updated VIF entry in instance network info cache for port c1f25457-4838-4a36-ae2c-4a60c84796ef. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1239.087240] env[61868]: DEBUG nova.network.neutron [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Updating instance_info_cache with network_info: [{"id": "c1f25457-4838-4a36-ae2c-4a60c84796ef", "address": "fa:16:3e:28:94:65", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1f25457-48", "ovs_interfaceid": "c1f25457-4838-4a36-ae2c-4a60c84796ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.098863] env[61868]: DEBUG oslo_concurrency.lockutils [req-123f26f7-6bdc-4202-b68b-a118f5a46427 req-5810bbc6-c440-42de-86f8-e766cab4f4ab service nova] Releasing lock "refresh_cache-4618de15-8f2c-4165-8f23-a4a5542f3d0b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1239.146528] env[61868]: INFO nova.compute.manager [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 
tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Took 0.07 seconds to detach 1 volumes for instance. [ 1239.149730] env[61868]: DEBUG nova.compute.manager [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Deleting volume: 6b172ac4-941c-4444-aef5-60dc17d4ad99 {{(pid=61868) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3238}} [ 1239.261069] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1239.261550] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1239.261908] env[61868]: DEBUG nova.objects.instance [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lazy-loading 'resources' on Instance uuid 16eb032d-fe34-4a46-883c-8b937806d63f {{(pid=61868) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.376545] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41079, 'name': CreateVM_Task, 'duration_secs': 0.310524} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.376744] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1239.377343] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1239.377581] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1239.383082] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3691fa66-dabe-4f0c-bc5f-1d0ccc092f2f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.420221] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Reconfiguring VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1239.423156] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a867f01-cfe7-4d5f-b01f-1bb186f9217d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.439533] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1239.439533] env[61868]: value = "task-41081" [ 1239.439533] env[61868]: _type = "Task" [ 1239.439533] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.450443] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41081, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.588326] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ca151f-f487-4a77-bb53-22a94fc74eeb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.595956] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2552b4e5-efea-4542-b921-ff84fcf1831e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.628683] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02413a40-bc27-4818-a913-d475c383e537 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.636913] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3200d57d-8c77-4c1f-aa4f-5e3c3047d0c5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.651342] env[61868]: DEBUG nova.compute.provider_tree [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.662583] env[61868]: DEBUG nova.scheduler.client.report [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1239.683040] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.421s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1239.741841] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b27cc074-e27b-4e37-afa3-b2027ca5174b tempest-ServersTestBootFromVolume-333976937 tempest-ServersTestBootFromVolume-333976937-project-member] Lock "16eb032d-fe34-4a46-883c-8b937806d63f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.083s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1239.949435] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41081, 'name': ReconfigVM_Task, 'duration_secs': 0.111444} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.949607] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Reconfigured VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1239.949699] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.572s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1239.950024] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1239.950109] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1239.950424] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1239.950675] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4414ee24-0ef6-4d8a-9cb7-d64bcef6a331 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.955586] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1239.955586] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5238a7d5-ebb3-3ad7-342c-29b635d988ce" [ 1239.955586] env[61868]: _type = "Task" [ 1239.955586] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.964369] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5238a7d5-ebb3-3ad7-342c-29b635d988ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.466292] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1240.466622] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.466906] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1240.467148] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1240.467521] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.467741] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-874ec417-e336-4017-bb72-70a35b7c4bb8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.485126] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.485440] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1240.486496] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0fe9304-5c20-48e3-9338-3ad0431e15ac {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.492647] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1240.492647] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d5f61e-03d2-5f1a-920c-eb311c3d0436" [ 1240.492647] env[61868]: _type = "Task" [ 1240.492647] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.501338] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d5f61e-03d2-5f1a-920c-eb311c3d0436, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.003784] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1241.004152] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore1] vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1241.004474] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cf9ac4a-ff34-4a6a-865f-eec876c26cf7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.026272] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore1] vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1241.026617] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Fetch image to [datastore1] vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1241.026879] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Downloading image file data 
790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore1] vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore1 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1241.027859] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe4445d-0565-4c9e-9c40-fe83efee79f0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.036854] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64e923e-57db-42b9-a279-9f738507fcdc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.046703] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a093746-5f1d-45fa-95de-9e33128a1105 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.077777] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbc8d21-b80b-4c2b-b7de-f04be7f6660e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.084430] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bcd60a5a-13dc-481b-b23f-59e2e052cb30 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.110601] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore1 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1241.161438] env[61868]: DEBUG oslo_vmware.rw_handles [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1241.229038] env[61868]: DEBUG oslo_vmware.rw_handles [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1241.229325] env[61868]: DEBUG oslo_vmware.rw_handles [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1245.351835] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.768459] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "185855b4-f378-45b6-9603-081b3c1b2c71" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1246.768783] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "185855b4-f378-45b6-9603-081b3c1b2c71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1246.809679] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "7c578516-d2b7-4b5e-aaac-5831d2262c44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1246.810109] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "7c578516-d2b7-4b5e-aaac-5831d2262c44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1249.351421] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.351696] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1249.351728] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1249.372290] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.372483] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.372569] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.372694] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.372818] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.372940] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.373059] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.373174] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.373289] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.373405] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1249.373522] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1249.374013] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.865796] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1250.351788] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.352152] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.375206] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.351420] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.356669] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.368355] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1253.368355] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1253.368355] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1253.368355] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1253.368355] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c3accf-cce4-4134-ae44-9a54bedbe875 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.374324] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6aeee12-2433-4694-a098-cca4a363a279 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.393703] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaa9b61-4117-4cd0-8a0c-ada6f13c8ee4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.401723] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e76f66-7e97-48d0-bf11-7c9acf8825ab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.432792] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181936MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1253.433171] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1253.433497] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1253.504053] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.504220] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.504347] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.504464] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.504581] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.504696] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.504810] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.504925] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.505040] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.505150] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4618de15-8f2c-4165-8f23-a4a5542f3d0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1253.516833] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.527113] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 05e4476d-5e31-4152-8db9-f24db047eb76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.537011] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.551225] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.576739] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.604742] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.616052] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.626926] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.646874] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 185855b4-f378-45b6-9603-081b3c1b2c71 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.660985] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c578516-d2b7-4b5e-aaac-5831d2262c44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1253.661940] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1253.662232] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1253.953436] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc54326-443e-482b-900f-57c6f491e6cb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.961627] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4339f9aa-65af-41fb-b1b4-9ad1d48ee689 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.993552] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a274529-b697-4bde-a301-d83d024dbb6a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.001241] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a74f62-8501-4707-a329-e345f828f461 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.023643] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.032415] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1254.049647] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1254.049837] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.616s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1256.049848] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.050197] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1256.351617] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.718427] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2bc4129f-81b5-4f22-9c18-0d3578b99dc8 tempest-ServersTestManualDisk-772162140 tempest-ServersTestManualDisk-772162140-project-member] Acquiring lock "189f5d4f-7a0e-4d49-a0c9-04e886b35383" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1260.718921] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2bc4129f-81b5-4f22-9c18-0d3578b99dc8 tempest-ServersTestManualDisk-772162140 tempest-ServersTestManualDisk-772162140-project-member] Lock "189f5d4f-7a0e-4d49-a0c9-04e886b35383" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1261.256035] env[61868]: DEBUG oslo_concurrency.lockutils [None req-89d4175e-d337-4144-9493-2ae0536dbaa0 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] Acquiring lock "8e01cf72-2e56-493d-8723-2e51398a7697" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1261.256332] env[61868]: DEBUG oslo_concurrency.lockutils [None req-89d4175e-d337-4144-9493-2ae0536dbaa0 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] Lock "8e01cf72-2e56-493d-8723-2e51398a7697" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1275.641401] env[61868]: WARNING oslo_vmware.rw_handles [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1275.641401] env[61868]: ERROR oslo_vmware.rw_handles [ 1275.642036] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1275.643547] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1275.643791] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Copying Virtual Disk [datastore2] vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/13f3f1a3-7563-40c1-9f21-591a0ac1c027/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1275.644096] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9891e8e-9863-4e3e-b80f-6214e0ad37ff {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.652114] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 
tempest-InstanceActionsTestJSON-555491356-project-member] Waiting for the task: (returnval){ [ 1275.652114] env[61868]: value = "task-41082" [ 1275.652114] env[61868]: _type = "Task" [ 1275.652114] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.662400] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Task: {'id': task-41082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.163763] env[61868]: DEBUG oslo_vmware.exceptions [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1276.164071] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1276.164636] env[61868]: ERROR nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.164636] env[61868]: Faults: ['InvalidArgument'] [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Traceback (most recent call last): [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] yield resources [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self.driver.spawn(context, instance, image_meta, [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self._fetch_image_if_missing(context, vi) [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] image_cache(vi, tmp_image_ds_loc) [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] vm_util.copy_virtual_disk( [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] session._wait_for_task(vmdk_copy_task) [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] return self.wait_for_task(task_ref) [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] return evt.wait() [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] result = hub.switch() [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] return self.greenlet.switch() [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self.f(*self.args, **self.kw) [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] raise exceptions.translate_fault(task_info.error) [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Faults: ['InvalidArgument'] [ 1276.164636] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] [ 1276.165411] env[61868]: INFO nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Terminating instance [ 1276.166676] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1276.166753] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.167322] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1276.167509] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1276.168239] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbba47db-fb05-4823-b1da-9cea7ac5c7fc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.171166] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22752e57-e015-4588-8cae-f52e71a76d68 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.178223] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1276.178449] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdbe1679-b9ec-4417-997f-381825366d65 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.180848] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.181055] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1276.182090] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6635d48-c6c4-4416-a891-bb702c7a06e0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.187488] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1276.187488] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52bbe025-7ef3-eaaa-a216-f5553f2e1044" [ 1276.187488] env[61868]: _type = "Task" [ 1276.187488] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.194913] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52bbe025-7ef3-eaaa-a216-f5553f2e1044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.254634] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1276.254994] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1276.255362] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Deleting the datastore file [datastore2] 4cec72dc-99c1-4cf9-b391-a909bab7fb23 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.255677] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cbfd9b9-f147-49a8-8fb5-aa2f49e98e69 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.263436] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Waiting for the task: (returnval){ [ 1276.263436] env[61868]: value = "task-41084" [ 1276.263436] env[61868]: _type = "Task" [ 1276.263436] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.273234] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Task: {'id': task-41084, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.698950] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1276.699304] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating directory with path [datastore2] vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.699466] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d22d7bff-4265-4512-bcdf-1b24efdc4e39 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.711453] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Created directory with path [datastore2] vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.711656] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Fetch image to [datastore2] vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1276.711818] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1276.712608] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46547619-2f4f-4306-811c-c42261800a5d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.720624] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85ae56b-8df1-4c52-8c65-191d64e64b54 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.730262] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52833791-f7c5-4a6b-a337-10709ef82d61 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.763068] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9de29987-7d38-4cc9-8366-0d91ff5f2278 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.776100] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-19684b9a-e63f-4fa2-86b9-bad26cf43a35 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.777986] env[61868]: DEBUG oslo_vmware.api [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Task: {'id': task-41084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080358} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.778260] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1276.778417] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1276.778590] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1276.778908] env[61868]: INFO nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1276.781079] env[61868]: DEBUG nova.compute.claims [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1276.781271] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1276.781767] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1276.803103] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1276.869793] env[61868]: DEBUG oslo_vmware.rw_handles [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1276.929173] env[61868]: DEBUG oslo_vmware.rw_handles [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1276.929370] env[61868]: DEBUG oslo_vmware.rw_handles [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1277.211525] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d48546-8a7e-41dc-b565-e4c790cb6a50 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.216366] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cfe4a4-c367-4366-8a9a-93fda3132f04 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.252765] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2253fd00-0e11-4ff1-91dc-5276ee5ee0b8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.262073] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3c4dd3-7b79-4931-be1d-bef3072ebd72 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.277372] env[61868]: DEBUG nova.compute.provider_tree [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.286578] env[61868]: DEBUG nova.scheduler.client.report [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1277.302958] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.521s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1277.303559] env[61868]: ERROR nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1277.303559] env[61868]: Faults: ['InvalidArgument'] [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Traceback (most recent call last): [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1277.303559] env[61868]: 
ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self.driver.spawn(context, instance, image_meta, [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self._fetch_image_if_missing(context, vi) [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] image_cache(vi, tmp_image_ds_loc) [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] vm_util.copy_virtual_disk( [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] session._wait_for_task(vmdk_copy_task) [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] return self.wait_for_task(task_ref) [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] return evt.wait() [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] result = hub.switch() [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] return self.greenlet.switch() [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] self.f(*self.args, **self.kw) [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] raise exceptions.translate_fault(task_info.error) [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Faults: ['InvalidArgument'] [ 1277.303559] env[61868]: ERROR nova.compute.manager [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] [ 1277.304402] env[61868]: DEBUG nova.compute.utils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1277.305724] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Build of instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 was re-scheduled: A specified parameter was not correct: fileType [ 1277.305724] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1277.306070] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1277.306241] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1277.306406] env[61868]: DEBUG nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1277.306569] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1277.585855] env[61868]: DEBUG nova.network.neutron [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.602529] env[61868]: INFO nova.compute.manager [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Took 0.30 seconds to deallocate network for instance. [ 1277.739584] env[61868]: INFO nova.scheduler.client.report [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Deleted allocations for instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 [ 1277.761321] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fc0b63fe-e867-4f45-989b-37a738a62569 tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 666.195s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1277.768089] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 467.210s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1277.768089] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Acquiring lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1277.768089] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1277.768089] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1277.768089] env[61868]: INFO nova.compute.manager [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Terminating instance [ 1277.768089] env[61868]: DEBUG nova.compute.manager [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1277.768089] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1277.768089] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa1d4a84-f94f-4cf5-8e6b-67eabc387f6e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.778881] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3439599-94b0-4392-b054-98630fa9150e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.790340] env[61868]: DEBUG nova.compute.manager [None req-149bb090-1f18-441d-bf94-873475c650f3 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] [instance: 31f60d0d-900f-4034-b954-00a219e223e7] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1277.811053] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4cec72dc-99c1-4cf9-b391-a909bab7fb23 could not be found. [ 1277.811289] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1277.811487] env[61868]: INFO nova.compute.manager [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Took 0.04 seconds to destroy the instance on the hypervisor. 
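Note on the lock records above: the paired 'acquired by ... waited Ns' / '"released" by ... held Ns' lines are emitted by oslo.concurrency's lockutils wrappers, which Nova uses for the per-instance build lock, the per-instance "-events" lock, and the resource tracker's "compute_resources" section. A minimal sketch of the idiom that produces these DEBUG lines, with a hypothetical function body (this is not Nova's code, just the pattern):

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped callable runs with the named in-process lock
    # held; lockutils logs the acquire/wait/hold timings at DEBUG level.
    @lockutils.synchronized('4cec72dc-99c1-4cf9-b391-a909bab7fb23-events')
    def clear_events():
        pass  # hypothetical body; only the lock scope matters here

    # Context-manager form, used for ad-hoc critical sections such as the
    # resource tracker's "compute_resources" claim/abort seen earlier.
    def abort_claim():
        with lockutils.lock('compute_resources'):
            pass  # hypothetical body

The 'held 666.195s' figure on the build lock therefore measures the entire _locked_do_build_and_run_instance attempt, including the failed spawn and re-schedule, not lock contention.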
[ 1277.811718] env[61868]: DEBUG oslo.service.loopingcall [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.811949] env[61868]: DEBUG nova.compute.manager [-] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1277.812061] env[61868]: DEBUG nova.network.neutron [-] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1277.821581] env[61868]: DEBUG nova.compute.manager [None req-149bb090-1f18-441d-bf94-873475c650f3 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] [instance: 31f60d0d-900f-4034-b954-00a219e223e7] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1277.849496] env[61868]: DEBUG nova.network.neutron [-] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.851434] env[61868]: DEBUG oslo_concurrency.lockutils [None req-149bb090-1f18-441d-bf94-873475c650f3 tempest-AttachInterfacesTestJSON-327680650 tempest-AttachInterfacesTestJSON-327680650-project-member] Lock "31f60d0d-900f-4034-b954-00a219e223e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.709s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1277.858264] env[61868]: INFO nova.compute.manager [-] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] Took 0.05 seconds to deallocate network for instance. [ 1277.873818] env[61868]: DEBUG nova.compute.manager [None req-c0807466-d316-4988-8385-a1fb7a3d03b8 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] [instance: ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1277.895671] env[61868]: DEBUG nova.compute.manager [None req-c0807466-d316-4988-8385-a1fb7a3d03b8 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] [instance: ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1277.940272] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c0807466-d316-4988-8385-a1fb7a3d03b8 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] Lock "ee1aa6fe-d7bd-412d-b5ae-663032c1b4c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.651s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1277.957298] env[61868]: DEBUG nova.compute.manager [None req-6819ef03-8f1b-49b7-833d-d4b198ed63c1 tempest-ServerActionsV293TestJSON-1559847917 tempest-ServerActionsV293TestJSON-1559847917-project-member] [instance: 05e4476d-5e31-4152-8db9-f24db047eb76] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1277.994992] env[61868]: DEBUG nova.compute.manager [None req-6819ef03-8f1b-49b7-833d-d4b198ed63c1 tempest-ServerActionsV293TestJSON-1559847917 tempest-ServerActionsV293TestJSON-1559847917-project-member] [instance: 05e4476d-5e31-4152-8db9-f24db047eb76] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1278.005254] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f98003d-3df6-408a-80dd-f8e4ced250ab tempest-InstanceActionsTestJSON-555491356 tempest-InstanceActionsTestJSON-555491356-project-member] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.243s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1278.006112] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 128.347s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1278.006293] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4cec72dc-99c1-4cf9-b391-a909bab7fb23] During sync_power_state the instance has a pending task (deleting). Skip. [ 1278.006458] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "4cec72dc-99c1-4cf9-b391-a909bab7fb23" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1278.019861] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6819ef03-8f1b-49b7-833d-d4b198ed63c1 tempest-ServerActionsV293TestJSON-1559847917 tempest-ServerActionsV293TestJSON-1559847917-project-member] Lock "05e4476d-5e31-4152-8db9-f24db047eb76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.532s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1278.035125] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1278.095450] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1278.095914] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1278.097602] env[61868]: INFO nova.compute.claims [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1278.438739] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e75fff-5587-4539-a0de-6fd06333a2f7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.446950] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12897ef6-b728-4432-9b75-1ea5fb760dd9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.477461] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f136a5-3e02-41bd-a7a2-c5569fe321cf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.486001] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49865b2-16fd-4cc0-9f20-5d3aee812b43 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.502544] env[61868]: DEBUG nova.compute.provider_tree [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.512434] env[61868]: DEBUG nova.scheduler.client.report [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1278.529406] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 
tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.433s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1278.529931] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1278.570119] env[61868]: DEBUG nova.compute.utils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.571803] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1278.571971] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1278.584057] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1278.621361] env[61868]: DEBUG nova.policy [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42563ff3e832401b9c7a69c9a3feebaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a33cb95d89ad4e1c8aacebb2a9e16009', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1278.662611] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1278.686236] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1278.686466] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1278.686616] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.686797] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1278.686980] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.687080] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1278.687273] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1278.687455] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1278.687673] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 
tempest-ServersTestJSON-1722207346-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1278.687840] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1278.688013] env[61868]: DEBUG nova.virt.hardware [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1278.688897] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f712cc3-75b8-4317-8c1d-3a5dc1066eb3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.697821] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c79cc0-8d6a-44a0-b8e8-bbe4c009adff {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.119280] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Successfully created port: e29cbb11-cf7f-41b5-989c-ea7b0d3707e4 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.742929] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Successfully updated port: e29cbb11-cf7f-41b5-989c-ea7b0d3707e4 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.764558] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "refresh_cache-f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1279.764757] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "refresh_cache-f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1279.764906] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1279.804577] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1279.950096] env[61868]: DEBUG nova.compute.manager [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Received event network-vif-plugged-e29cbb11-cf7f-41b5-989c-ea7b0d3707e4 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1279.950322] env[61868]: DEBUG oslo_concurrency.lockutils [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] Acquiring lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1279.950530] env[61868]: DEBUG oslo_concurrency.lockutils [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1279.950697] env[61868]: DEBUG oslo_concurrency.lockutils [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1279.950889] env[61868]: DEBUG nova.compute.manager [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] No waiting events found dispatching network-vif-plugged-e29cbb11-cf7f-41b5-989c-ea7b0d3707e4 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1279.951067] env[61868]: WARNING nova.compute.manager [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Received unexpected event network-vif-plugged-e29cbb11-cf7f-41b5-989c-ea7b0d3707e4 for instance with vm_state building and task_state spawning. [ 1279.951230] env[61868]: DEBUG nova.compute.manager [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Received event network-changed-e29cbb11-cf7f-41b5-989c-ea7b0d3707e4 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1279.951374] env[61868]: DEBUG nova.compute.manager [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Refreshing instance network info cache due to event network-changed-e29cbb11-cf7f-41b5-989c-ea7b0d3707e4. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1279.951539] env[61868]: DEBUG oslo_concurrency.lockutils [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] Acquiring lock "refresh_cache-f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1279.975371] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Updating instance_info_cache with network_info: [{"id": "e29cbb11-cf7f-41b5-989c-ea7b0d3707e4", "address": "fa:16:3e:3d:82:84", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape29cbb11-cf", "ovs_interfaceid": "e29cbb11-cf7f-41b5-989c-ea7b0d3707e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.989649] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "refresh_cache-f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1279.990037] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Instance network_info: |[{"id": "e29cbb11-cf7f-41b5-989c-ea7b0d3707e4", "address": "fa:16:3e:3d:82:84", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape29cbb11-cf", "ovs_interfaceid": "e29cbb11-cf7f-41b5-989c-ea7b0d3707e4", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1279.990345] env[61868]: DEBUG oslo_concurrency.lockutils [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] Acquired lock "refresh_cache-f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1279.990531] env[61868]: DEBUG nova.network.neutron [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Refreshing network info cache for port e29cbb11-cf7f-41b5-989c-ea7b0d3707e4 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1279.991822] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:82:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba6157eb-73cb-428a-9f46-99081165d7eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e29cbb11-cf7f-41b5-989c-ea7b0d3707e4', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1279.999800] env[61868]: DEBUG oslo.service.loopingcall [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.003937] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1280.003937] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb64d060-103c-4860-8858-127d1adf0e3e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.025391] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.025391] env[61868]: value = "task-41085" [ 1280.025391] env[61868]: _type = "Task" [ 1280.025391] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.035836] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41085, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.271462] env[61868]: DEBUG nova.network.neutron [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Updated VIF entry in instance network info cache for port e29cbb11-cf7f-41b5-989c-ea7b0d3707e4. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1280.272304] env[61868]: DEBUG nova.network.neutron [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Updating instance_info_cache with network_info: [{"id": "e29cbb11-cf7f-41b5-989c-ea7b0d3707e4", "address": "fa:16:3e:3d:82:84", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape29cbb11-cf", "ovs_interfaceid": "e29cbb11-cf7f-41b5-989c-ea7b0d3707e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.283834] env[61868]: DEBUG oslo_concurrency.lockutils [req-d52763d2-0533-4241-a260-ddc8744a162f req-f2a81663-aa2d-4b6b-9434-e06be63156e1 service nova] Releasing lock "refresh_cache-f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1280.537060] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41085, 'name': CreateVM_Task, 'duration_secs': 0.349984} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.537325] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1280.537957] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1280.538234] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1280.541386] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e4809a-717e-41d0-b9ac-ff0243ac28ae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.574321] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Reconfiguring VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1280.574751] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd24c868-f9ba-4b92-8d2b-9e1fa3d2c557 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.591813] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1280.591813] env[61868]: value = "task-41086" [ 1280.591813] env[61868]: _type = "Task" [ 1280.591813] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.601256] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41086, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.102257] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41086, 'name': ReconfigVM_Task, 'duration_secs': 0.108625} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.102545] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Reconfigured VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1281.102757] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1281.102997] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1281.103142] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1281.103457] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1281.103706] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-168701b4-e5c8-4f77-94d7-cb9293ea6632 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.108645] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1281.108645] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]525b8bf4-e3ae-a611-7429-cd081484306a" [ 1281.108645] env[61868]: _type = "Task" [ 1281.108645] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.117624] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]525b8bf4-e3ae-a611-7429-cd081484306a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.619455] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1281.619795] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.619922] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1284.210111] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1291.243246] env[61868]: WARNING oslo_vmware.rw_handles [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1291.243246] env[61868]: ERROR oslo_vmware.rw_handles [ 1291.244076] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Downloaded image file data 
790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore1 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1291.244999] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1291.245242] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Copying Virtual Disk [datastore1] vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore1] vmware_temp/cdf7b053-2961-4ed9-8e0f-4d41c0a74e1a/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1291.245530] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96763077-7049-4b5e-9025-434cfe534104 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.254179] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1291.254179] env[61868]: value = "task-41087" [ 1291.254179] env[61868]: _type = "Task" [ 1291.254179] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.263318] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41087, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.764432] env[61868]: DEBUG oslo_vmware.exceptions [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1291.764730] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1291.765302] env[61868]: ERROR nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1291.765302] env[61868]: Faults: ['InvalidArgument'] [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Traceback (most recent call last): [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] yield resources [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self.driver.spawn(context, instance, image_meta, [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self._fetch_image_if_missing(context, vi) [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] image_cache(vi, tmp_image_ds_loc) [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] vm_util.copy_virtual_disk( [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] session._wait_for_task(vmdk_copy_task) [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] return self.wait_for_task(task_ref) [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] return evt.wait() [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] result = hub.switch() [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] return self.greenlet.switch() [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self.f(*self.args, **self.kw) [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] raise exceptions.translate_fault(task_info.error) [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Faults: ['InvalidArgument'] [ 1291.765302] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] [ 1291.766351] env[61868]: INFO nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Terminating instance [ 1291.768402] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1291.768604] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1291.769433] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daac0cc5-13e4-462b-b694-17a1f1e3da52 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.778401] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1291.778711] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-995ecbbc-248b-4457-9b71-4104f921b626 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.849081] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1291.849326] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Deleting contents of the VM from datastore datastore1 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1291.849514] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleting the datastore file [datastore1] 4618de15-8f2c-4165-8f23-a4a5542f3d0b {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1291.849794] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e340da2b-4f97-4720-8add-8d4110675bfb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.856886] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1291.856886] env[61868]: value = "task-41089" [ 1291.856886] env[61868]: _type = "Task" [ 1291.856886] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.865400] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41089, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.367341] env[61868]: DEBUG oslo_vmware.api [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072057} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.367628] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.367871] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Deleted contents of the VM from datastore datastore1 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1292.368082] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1292.368303] env[61868]: INFO nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Took 0.60 seconds to destroy the instance on the hypervisor. 
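The CopyVirtualDisk_Task failure above surfaces through oslo_vmware's task polling: wait_for_task keeps re-reading the vCenter TaskInfo and, once the task reaches the error state, raises exceptions.translate_fault(task_info.error), which is the VimFaultException seen in the nova.compute.manager traceback. A minimal standalone sketch of that polling pattern, assuming simplified stand-ins (TaskInfo, wait_for_task, and this VimFaultException are illustrative models, not the oslo_vmware API):

    import time
    from dataclasses import dataclass

    class VimFaultException(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list

    @dataclass
    class TaskInfo:
        state: str                # 'running' | 'success' | 'error'
        error: Exception = None   # populated by vCenter on failure

    def wait_for_task(poll, interval=0.1):
        """Poll until the task leaves 'running'; raise its stored fault on 'error'."""
        while True:
            info = poll()
            if info.state == 'running':
                time.sleep(interval)   # the "progress is 0%" lines come from this loop
                continue
            if info.state == 'error':
                raise info.error       # surfaces in the caller's traceback, as above
            return info

    # Two polls of 'running', then the fault recorded for task-41087.
    states = iter([
        TaskInfo('running'),
        TaskInfo('running'),
        TaskInfo('error', VimFaultException(
            ['InvalidArgument'],
            'A specified parameter was not correct: fileType')),
    ])

    try:
        wait_for_task(lambda: next(states))
    except VimFaultException as exc:
        print('Faults:', exc.fault_list)   # Faults: ['InvalidArgument']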
[ 1292.370634] env[61868]: DEBUG nova.compute.claims [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1292.370885] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1292.371227] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1292.649153] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b693850f-c384-451e-a29f-5937621db40b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.657154] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2182809-f822-443d-a810-e229618dce11 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.687315] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87fa009-5feb-44d5-8d93-95bade7dfd04 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.695330] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad792c64-a985-4f80-bb9a-f2b95ac92acf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.709003] env[61868]: DEBUG nova.compute.provider_tree [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.718031] env[61868]: DEBUG nova.scheduler.client.report [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1292.735384] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.364s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1292.735920] env[61868]: ERROR nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1292.735920] env[61868]: Faults: ['InvalidArgument'] [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Traceback (most recent call last): [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self.driver.spawn(context, instance, image_meta, [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self._fetch_image_if_missing(context, vi) [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] image_cache(vi, tmp_image_ds_loc) [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] vm_util.copy_virtual_disk( [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] session._wait_for_task(vmdk_copy_task) [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] return self.wait_for_task(task_ref) [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] return evt.wait() [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 
4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] result = hub.switch() [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] return self.greenlet.switch() [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] self.f(*self.args, **self.kw) [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] raise exceptions.translate_fault(task_info.error) [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Faults: ['InvalidArgument'] [ 1292.735920] env[61868]: ERROR nova.compute.manager [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] [ 1292.736665] env[61868]: DEBUG nova.compute.utils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1292.738011] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Build of instance 4618de15-8f2c-4165-8f23-a4a5542f3d0b was re-scheduled: A specified parameter was not correct: fileType [ 1292.738011] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1292.738383] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1292.738556] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1292.738746] env[61868]: DEBUG nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1292.738927] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1293.023201] env[61868]: DEBUG nova.network.neutron [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.038839] env[61868]: INFO nova.compute.manager [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Took 0.30 seconds to deallocate network for instance. [ 1293.139323] env[61868]: INFO nova.scheduler.client.report [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted allocations for instance 4618de15-8f2c-4165-8f23-a4a5542f3d0b [ 1293.157730] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9ae51c32-dc39-4b24-8311-1cefd5078884 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.654s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1293.159280] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 43.294s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1293.159492] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1293.159697] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1293.159863] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1293.162382] env[61868]: INFO nova.compute.manager [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Terminating instance [ 1293.170179] env[61868]: DEBUG nova.compute.manager [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1293.170436] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1293.170706] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55ca3cef-68c0-4888-9a8c-c886c2581ab2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.173520] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1293.183792] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8b8745-54ad-4b4c-b5e5-fdf24db536f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.216025] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4618de15-8f2c-4165-8f23-a4a5542f3d0b could not be found. [ 1293.216170] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1293.216310] env[61868]: INFO nova.compute.manager [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Took 0.05 seconds to destroy the instance on the hypervisor. 
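The second terminate (req-8b0d4449) finds nothing left in vCenter yet still reports "Instance destroyed": the destroy path treats a missing backend VM as already deleted rather than as a failure, which is what makes deletion idempotent after the re-schedule. A sketch of that shape, assuming hypothetical callables find_vm_ref and unregister in place of the real vmops helpers:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(find_vm_ref, unregister, uuid):
        """Idempotent destroy: a VM absent on the backend is not an error."""
        try:
            vm_ref = find_vm_ref(uuid)
        except InstanceNotFound:
            # Matches the WARNING above: log it and fall through to cleanup.
            print('Instance does not exist on backend: %s' % uuid)
            return
        unregister(vm_ref)

    def missing(uuid):
        raise InstanceNotFound(uuid)

    destroy_instance(missing, lambda ref: None,
                     '4618de15-8f2c-4165-8f23-a4a5542f3d0b')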
[ 1293.216563] env[61868]: DEBUG oslo.service.loopingcall [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.216829] env[61868]: DEBUG nova.compute.manager [-] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1293.216930] env[61868]: DEBUG nova.network.neutron [-] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1293.235642] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1293.235891] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1293.237449] env[61868]: INFO nova.compute.claims [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1293.248690] env[61868]: DEBUG nova.network.neutron [-] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.264849] env[61868]: INFO nova.compute.manager [-] [instance: 4618de15-8f2c-4165-8f23-a4a5542f3d0b] Took 0.05 seconds to deallocate network for instance. 
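_deallocate_network_with_retries runs inside an oslo.service looping call: the wrapped function is invoked repeatedly, with a back-off between attempts, until it signals completion, and only then does the "Waiting for function ... to return" entry resolve. A simplified model of that retry loop, under assumed names (retry_until_true and its parameters are illustrative, not the oslo_service interface):

    import time

    def retry_until_true(func, max_attempts=3, start_interval=0.01, backoff=2.0):
        """Call func until it returns True, sleeping longer after each miss."""
        interval = start_interval
        for attempt in range(1, max_attempts + 1):
            if func():
                return attempt
            time.sleep(interval)
            interval *= backoff
        raise RuntimeError('gave up after %d attempts' % max_attempts)

    calls = []
    def deallocate_network():
        calls.append(1)
        return len(calls) >= 2   # pretend neutron succeeds on the second try

    print('succeeded on attempt', retry_until_true(deallocate_network))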
[ 1293.362693] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8b0d4449-25d8-4d70-87bd-1ba64ed36f23 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "4618de15-8f2c-4165-8f23-a4a5542f3d0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.203s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1293.529241] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a60f02-8ef3-457d-a491-e766e5017e1b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.537290] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212bb4e8-9441-4d83-8e28-efc1f4635d4a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.569919] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0987775-bca3-4a99-ba54-ee518d8f3c10 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.578606] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b35e629-b8f4-4de8-9fe4-d671846eb6c6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.593201] env[61868]: DEBUG nova.compute.provider_tree [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.601374] env[61868]: DEBUG nova.scheduler.client.report [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1293.617481] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.381s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1293.618213] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1293.657883] env[61868]: DEBUG nova.compute.utils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1293.659492] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1293.659824] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1293.673482] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1293.708968] env[61868]: DEBUG nova.policy [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '849d4993969148199ca906533cbf36f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50b4dcd22f6b411bbb5e34b0ddfaf12b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1293.749455] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1293.772303] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1293.773051] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1293.773266] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1293.773475] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1293.773626] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1293.773774] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1293.773986] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1293.774157] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1293.774326] env[61868]: DEBUG nova.virt.hardware [None 
req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1293.774491] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1293.774665] env[61868]: DEBUG nova.virt.hardware [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1293.775523] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f48b5a-dd99-4f4b-bf8f-87fb5d17dee3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.784594] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08747e3-49e2-4e3d-9272-d71de9d7057e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.025725] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Successfully created port: 4aaea5b2-3b79-47a7-98c6-2653d175f630 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1294.396300] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1294.397159] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1294.592040] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Successfully updated port: 4aaea5b2-3b79-47a7-98c6-2653d175f630 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1294.632642] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "refresh_cache-eafa3522-51e3-4582-b060-3e3ac4224ae2" {{(pid=61868) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1294.632785] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquired lock "refresh_cache-eafa3522-51e3-4582-b060-3e3ac4224ae2" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1294.632932] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1294.675964] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1294.831771] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Updating instance_info_cache with network_info: [{"id": "4aaea5b2-3b79-47a7-98c6-2653d175f630", "address": "fa:16:3e:ad:70:a3", "network": {"id": "7a823f29-5526-4be1-92e6-0be26a7f3023", "bridge": "br-int", "label": "tempest-TaggedAttachmentsTest-830227066-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "50b4dcd22f6b411bbb5e34b0ddfaf12b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aaea5b2-3b", "ovs_interfaceid": "4aaea5b2-3b79-47a7-98c6-2653d175f630", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.845781] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Releasing lock "refresh_cache-eafa3522-51e3-4582-b060-3e3ac4224ae2" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1294.846081] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Instance network_info: |[{"id": "4aaea5b2-3b79-47a7-98c6-2653d175f630", "address": "fa:16:3e:ad:70:a3", "network": {"id": "7a823f29-5526-4be1-92e6-0be26a7f3023", "bridge": "br-int", "label": 
"tempest-TaggedAttachmentsTest-830227066-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "50b4dcd22f6b411bbb5e34b0ddfaf12b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aaea5b2-3b", "ovs_interfaceid": "4aaea5b2-3b79-47a7-98c6-2653d175f630", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1294.846506] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:70:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4aaea5b2-3b79-47a7-98c6-2653d175f630', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1294.854752] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Creating folder: Project (50b4dcd22f6b411bbb5e34b0ddfaf12b). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1294.855346] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5762985-42e0-4563-8cbb-0e3c538bb7e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.867665] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Created folder: Project (50b4dcd22f6b411bbb5e34b0ddfaf12b) in parent group-v18181. [ 1294.867907] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Creating folder: Instances. Parent ref: group-v18270. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1294.868165] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-101f23b7-9a07-4707-bb1c-088812b80337 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.877195] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Created folder: Instances in parent group-v18270. 
[ 1294.877491] env[61868]: DEBUG oslo.service.loopingcall [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1294.877704] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1294.877926] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80d72b07-2e7a-4fcc-bad4-49a19291d929 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.897064] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1294.897064] env[61868]: value = "task-41092" [ 1294.897064] env[61868]: _type = "Task" [ 1294.897064] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.905454] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41092, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.121659] env[61868]: DEBUG nova.compute.manager [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Received event network-vif-plugged-4aaea5b2-3b79-47a7-98c6-2653d175f630 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1295.122002] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] Acquiring lock "eafa3522-51e3-4582-b060-3e3ac4224ae2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1295.122282] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1295.122481] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1295.122680] env[61868]: DEBUG nova.compute.manager [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] No waiting events found dispatching network-vif-plugged-4aaea5b2-3b79-47a7-98c6-2653d175f630 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1295.122856] env[61868]: WARNING nova.compute.manager [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Received 
unexpected event network-vif-plugged-4aaea5b2-3b79-47a7-98c6-2653d175f630 for instance with vm_state building and task_state spawning. [ 1295.123025] env[61868]: DEBUG nova.compute.manager [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Received event network-changed-4aaea5b2-3b79-47a7-98c6-2653d175f630 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1295.123270] env[61868]: DEBUG nova.compute.manager [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Refreshing instance network info cache due to event network-changed-4aaea5b2-3b79-47a7-98c6-2653d175f630. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1295.123480] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] Acquiring lock "refresh_cache-eafa3522-51e3-4582-b060-3e3ac4224ae2" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1295.123618] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] Acquired lock "refresh_cache-eafa3522-51e3-4582-b060-3e3ac4224ae2" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1295.123779] env[61868]: DEBUG nova.network.neutron [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Refreshing network info cache for port 4aaea5b2-3b79-47a7-98c6-2653d175f630 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1295.406765] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41092, 'name': CreateVM_Task, 'duration_secs': 0.303407} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.406960] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1295.407547] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1295.407789] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1295.410704] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7a79cd-84ff-418d-a5e4-4f20a1324fb6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.421443] env[61868]: DEBUG nova.network.neutron [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Updated VIF entry in instance network info cache for port 4aaea5b2-3b79-47a7-98c6-2653d175f630. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1295.421769] env[61868]: DEBUG nova.network.neutron [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Updating instance_info_cache with network_info: [{"id": "4aaea5b2-3b79-47a7-98c6-2653d175f630", "address": "fa:16:3e:ad:70:a3", "network": {"id": "7a823f29-5526-4be1-92e6-0be26a7f3023", "bridge": "br-int", "label": "tempest-TaggedAttachmentsTest-830227066-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "50b4dcd22f6b411bbb5e34b0ddfaf12b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aaea5b2-3b", "ovs_interfaceid": "4aaea5b2-3b79-47a7-98c6-2653d175f630", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.432216] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6903ea8-6df5-46e7-ae95-cc6034af458e req-316917fc-3225-45c1-92be-060181fceb9b service nova] Releasing lock "refresh_cache-eafa3522-51e3-4582-b060-3e3ac4224ae2" 
{{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1295.445684] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Reconfiguring VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1295.446225] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d182ae4-1a09-497e-930b-14a2e6d45b19 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.462900] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Waiting for the task: (returnval){ [ 1295.462900] env[61868]: value = "task-41093" [ 1295.462900] env[61868]: _type = "Task" [ 1295.462900] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.473608] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Task: {'id': task-41093, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.973443] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Task: {'id': task-41093, 'name': ReconfigVM_Task, 'duration_secs': 0.10456} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.973802] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Reconfigured VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1295.974024] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.566s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1295.974318] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1295.974507] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1295.974879] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1295.975178] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-227d42b0-897b-46df-a0b9-ba240f7051a8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.979895] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Waiting for the task: (returnval){ [ 1295.979895] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]527f1558-438f-261d-f685-84f62da98993" [ 1295.979895] env[61868]: _type = "Task" [ 1295.979895] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.988314] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]527f1558-438f-261d-f685-84f62da98993, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.277551] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "eafa3522-51e3-4582-b060-3e3ac4224ae2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1296.490580] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1296.490846] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1296.491072] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1305.352029] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.351027] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.351375] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1309.351375] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1309.371880] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372107] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372224] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372361] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372468] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372582] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372691] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372808] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.372924] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.373038] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1309.373156] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1310.368667] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.352286] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.352472] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.352019] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.351988] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.362562] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1313.362858] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1313.362974] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1313.363107] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1313.364348] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d5c18a-ca4b-4894-bb3d-149ffac1f242 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.373317] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2b36ba-faf6-476f-8468-d69f702eb3a5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.387757] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03379c1a-e4bb-48e8-9216-82b74460a40d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.394359] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8c8e96-0d7b-4b06-a6d4-26ceb7a37c50 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.427612] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181926MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1313.427716] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1313.427876] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1313.492213] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 972ab1c7-03b0-4294-930c-8084674083ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.492382] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.492515] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.492640] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.492761] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.492882] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.493002] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.493118] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.493233] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.493346] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1313.503760] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.513673] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.523700] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.532970] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.542649] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 185855b4-f378-45b6-9603-081b3c1b2c71 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.551930] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c578516-d2b7-4b5e-aaac-5831d2262c44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.562526] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 189f5d4f-7a0e-4d49-a0c9-04e886b35383 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.572386] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8e01cf72-2e56-493d-8723-2e51398a7697 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.581823] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1313.582088] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1313.582239] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1313.806973] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bf196a-6239-40ce-8ce0-7e404a5116a1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.815250] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a83b50-271c-4036-8765-88011c0addc3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.848557] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21510dcf-188c-4a5c-87f4-815b23792650 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.862848] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c5fd2c-17a2-4d4b-b58a-58013db4a951 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.877040] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.885337] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1313.903737] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1313.903946] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.476s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1317.904635] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1317.905024] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1318.351509] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.842277] env[61868]: WARNING oslo_vmware.rw_handles [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.842277] env[61868]: ERROR oslo_vmware.rw_handles [ 1324.842852] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1324.844597] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1324.844906] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Copying Virtual Disk [datastore2] vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] 
vmware_temp/c0682214-982a-4e68-9a14-f8305774a6ef/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1324.845260] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb78b990-36bd-4151-8ab8-dcc812644727 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.854123] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1324.854123] env[61868]: value = "task-41094" [ 1324.854123] env[61868]: _type = "Task" [ 1324.854123] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.863001] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.365039] env[61868]: DEBUG oslo_vmware.exceptions [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1325.365337] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1325.365889] env[61868]: ERROR nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.365889] env[61868]: Faults: ['InvalidArgument'] [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Traceback (most recent call last): [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] yield resources [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self.driver.spawn(context, instance, image_meta, [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1325.365889] env[61868]: ERROR 
nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self._fetch_image_if_missing(context, vi) [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] image_cache(vi, tmp_image_ds_loc) [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] vm_util.copy_virtual_disk( [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] session._wait_for_task(vmdk_copy_task) [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] return self.wait_for_task(task_ref) [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] return evt.wait() [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] result = hub.switch() [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] return self.greenlet.switch() [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self.f(*self.args, **self.kw) [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] raise exceptions.translate_fault(task_info.error) [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Faults: ['InvalidArgument'] [ 1325.365889] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] [ 1325.366883] env[61868]: INFO nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Terminating instance [ 1325.367729] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1325.367934] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.368221] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e0f40e9-585f-479d-b397-b5b57517d348 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.370675] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1325.370898] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1325.371625] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9019e39f-0e82-4cfc-8e9e-46ae79649df7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.378206] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1325.378414] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca939d50-8f17-4883-8f6d-1d6c2bde3de6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.380664] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.380895] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1325.381810] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05be700d-a6c7-4bd7-823e-b5bf2c16f89c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.386645] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 1325.386645] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d94df0-68c0-1368-3a66-f41139d724d0" [ 1325.386645] env[61868]: _type = "Task" [ 1325.386645] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.398246] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d94df0-68c0-1368-3a66-f41139d724d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.458465] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1325.460025] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1325.460025] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Deleting the datastore file [datastore2] 972ab1c7-03b0-4294-930c-8084674083ba {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.460237] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3cd268f-6126-4e38-9836-bee45485361c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.466928] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1325.466928] env[61868]: value = "task-41096" [ 1325.466928] env[61868]: _type = "Task" [ 1325.466928] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.476274] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41096, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.897263] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1325.897527] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Creating directory with path [datastore2] vmware_temp/6978b1c5-7d2d-4f3b-a06c-433918f2ca33/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.897758] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6909f854-e46a-4bd2-bfc1-179b553d03cb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.908670] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Created directory with path [datastore2] vmware_temp/6978b1c5-7d2d-4f3b-a06c-433918f2ca33/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.908925] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Fetch image to [datastore2] vmware_temp/6978b1c5-7d2d-4f3b-a06c-433918f2ca33/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1325.909114] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/6978b1c5-7d2d-4f3b-a06c-433918f2ca33/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1325.909826] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a21f9a3-5207-453b-b067-c97b4eaf10e4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.916719] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a2ddf1-3d9d-48ff-a7c0-b2f0257b03a9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.925577] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc05e85e-5c0f-48ce-8789-e3e800599b2a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.958953] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-51582528-f224-413b-b72d-f94ae4d6bca6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.965542] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-198a842e-48d0-4429-b971-4e081b763d85 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.975502] env[61868]: DEBUG oslo_vmware.api [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06812} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.975745] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.975926] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1325.976168] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1325.976390] env[61868]: INFO nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Took 0.61 seconds to destroy the instance on the hypervisor. 
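Annotation: the failed CopyVirtualDisk_Task above surfaces through oslo.vmware's task polling — the "Fault InvalidArgument not matched" line is get_fault_class() falling back to the generic VimFaultException, which then carries the fault names in fault_list and the "A specified parameter was not correct: fileType" message. Below is a minimal sketch of that poll-and-handle flow, assuming an already-created oslo_vmware.api.VMwareAPISession (as in the log); the helper name copy_and_wait is hypothetical, not Nova's actual code.

from oslo_vmware import exceptions as vexc

def copy_and_wait(session, copy_task):
    """Hypothetical helper: block until a vCenter task finishes.

    `session` is assumed to be an oslo_vmware.api.VMwareAPISession, the
    object whose wait_for_task/_poll_task lines appear in this log.
    """
    try:
        # wait_for_task() polls the server-side task object (the
        # "progress is 0%" lines) and raises a translated exception
        # once task_info.error is reported.
        return session.wait_for_task(copy_task)
    except vexc.VimFaultException as exc:
        # No specific fault class matched 'InvalidArgument' above, so the
        # generic VimFaultException is raised with fault_list preserved.
        if 'InvalidArgument' in (exc.fault_list or []):
            # e.g. the fileType failure seen above; the caller's
            # terminate/clean-up path (UnregisterVM, file delete) follows.
            print('disk copy failed: %s' % exc)
        raise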
[ 1325.978561] env[61868]: DEBUG nova.compute.claims [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1325.978774] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1325.978943] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1325.993221] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1326.161473] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1326.163179] env[61868]: ERROR nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. 
[ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = getattr(controller, method)(*args, **kwargs) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self._get(image_id) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] resp, body = self.http_client.get(url, headers=header) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.request(url, 'GET', **kwargs) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self._handle_response(resp) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise exc.from_response(resp, resp.content) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] During handling of the above exception, another exception occurred: [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] yield resources [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.driver.spawn(context, instance, image_meta, [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._fetch_image_if_missing(context, vi) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] image_fetch(context, vi, tmp_image_ds_loc) [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] images.fetch_image( [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1326.163179] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] metadata = IMAGE_API.get(context, image_ref) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return session.show(context, image_id, [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] _reraise_translated_image_exception(image_id) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise new_exc.with_traceback(exc_trace) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = getattr(controller, method)(*args, **kwargs) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self._get(image_id) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] resp, body = self.http_client.get(url, headers=header) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.request(url, 'GET', **kwargs) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self._handle_response(resp) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise exc.from_response(resp, resp.content) [ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] nova.exception.ImageNotAuthorized: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. 
[ 1326.164157] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1326.164157] env[61868]: INFO nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Terminating instance [ 1326.165092] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1326.165297] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.166249] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1326.166249] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1326.166361] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1326.167281] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-059cd89f-8961-407a-ba1c-0a8dd4ade6b3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.178838] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.179316] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1326.179989] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85c1c2f8-9320-4fcd-b3f9-a921c34cca8d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.190547] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Waiting for the task: (returnval){ [ 1326.190547] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]529076cf-1e1d-8730-4415-b169f50389a0" [ 1326.190547] env[61868]: _type = "Task" [ 1326.190547] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.199536] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]529076cf-1e1d-8730-4415-b169f50389a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.200036] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1326.227272] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.236696] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1326.237365] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1326.237608] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1326.238740] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8979a1f-4b11-4e51-a49d-8ffa49c0b150 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.249636] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1326.249940] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1648dc5d-a260-46de-b13a-9173bb2c7ffb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.279348] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1326.279876] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1326.280227] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Deleting the datastore file [datastore2] d6ac9ed4-56dd-493a-8d9f-0cfad210b6de {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1326.280731] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60b1a123-73ce-4e47-b95c-9b4231e7a484 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.290826] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for the task: (returnval){ [ 1326.290826] env[61868]: value = "task-41098" [ 1326.290826] env[61868]: _type = "Task" [ 1326.290826] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.300933] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-41098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.309118] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac869cb-1e3e-4257-aa9e-36aa4c3c68fd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.316546] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f8e605-a01c-4664-8d01-437ba91c4d00 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.348672] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59eab586-abb1-47ac-a236-cc24bac1ee78 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.356461] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472b9532-a681-4a57-8215-01de8b10f666 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.371116] env[61868]: DEBUG nova.compute.provider_tree [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.379908] env[61868]: DEBUG nova.scheduler.client.report [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1326.398576] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.419s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1326.399441] env[61868]: ERROR nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.399441] env[61868]: Faults: ['InvalidArgument'] [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Traceback (most recent call last): [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1326.399441] 
env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self.driver.spawn(context, instance, image_meta, [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self._fetch_image_if_missing(context, vi) [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] image_cache(vi, tmp_image_ds_loc) [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] vm_util.copy_virtual_disk( [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] session._wait_for_task(vmdk_copy_task) [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] return self.wait_for_task(task_ref) [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] return evt.wait() [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] result = hub.switch() [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] return self.greenlet.switch() [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] self.f(*self.args, **self.kw) [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] raise exceptions.translate_fault(task_info.error) [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Faults: ['InvalidArgument'] [ 1326.399441] env[61868]: ERROR nova.compute.manager [instance: 972ab1c7-03b0-4294-930c-8084674083ba] [ 1326.401003] env[61868]: DEBUG nova.compute.utils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1326.402623] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Build of instance 972ab1c7-03b0-4294-930c-8084674083ba was re-scheduled: A specified parameter was not correct: fileType [ 1326.402623] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1326.403013] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1326.403367] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1326.403437] env[61868]: DEBUG nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1326.403557] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1326.667820] env[61868]: DEBUG nova.network.neutron [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.682903] env[61868]: INFO nova.compute.manager [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Took 0.28 seconds to deallocate network for instance. [ 1326.702064] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1326.702352] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Creating directory with path [datastore2] vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.702596] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06202768-d76a-4097-bcda-96eae8e0e06b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.715893] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Created directory with path [datastore2] vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.716278] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Fetch image to [datastore2] vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1326.716611] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 
tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1326.717499] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27497df-4334-41da-947a-10860a6a25c0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.730524] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15041063-9a62-4fa5-b340-38bba0653468 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.742232] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfeab3f-0a5d-4934-853a-8156634f8d09 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.779021] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eef26fb-21b2-4eff-9ab2-8c036c13f234 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.787693] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c9184325-9e4e-4229-8a9d-6589e13d2624 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.803337] env[61868]: DEBUG oslo_vmware.api [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Task: {'id': task-41098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031841} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.803337] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1326.803337] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1326.803337] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1326.805525] env[61868]: INFO nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Took 0.57 seconds to destroy the instance on the hypervisor. [ 1326.805826] env[61868]: DEBUG oslo.service.loopingcall [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1326.806029] env[61868]: DEBUG nova.compute.manager [-] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1326.806137] env[61868]: DEBUG nova.network.neutron [-] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1326.812174] env[61868]: INFO nova.scheduler.client.report [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Deleted allocations for instance 972ab1c7-03b0-4294-930c-8084674083ba [ 1326.818633] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1326.838546] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1a0eceb3-0800-4c89-8366-abc4ca18cc0e tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "972ab1c7-03b0-4294-930c-8084674083ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 688.823s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1326.842541] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "972ab1c7-03b0-4294-930c-8084674083ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 490.624s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1326.842541] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "972ab1c7-03b0-4294-930c-8084674083ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1326.842861] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "972ab1c7-03b0-4294-930c-8084674083ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1326.842861] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "972ab1c7-03b0-4294-930c-8084674083ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1326.844626] env[61868]: INFO nova.compute.manager [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Terminating instance [ 1326.846197] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1326.846357] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1326.846522] env[61868]: DEBUG nova.network.neutron [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1326.863084] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1326.924052] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1326.924335] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1326.925826] env[61868]: INFO nova.compute.claims [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1326.929132] env[61868]: DEBUG oslo_vmware.rw_handles [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1326.992458] env[61868]: DEBUG nova.network.neutron [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1326.994908] env[61868]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61868) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1326.995150] env[61868]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-1722b31a-355a-4e65-b5df-a3d1faa6db99'] [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1326.996696] env[61868]: ERROR
oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1326.996696] 
env[61868]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1326.996696] env[61868]: ERROR oslo.service.loopingcall [ 1326.999571] env[61868]: ERROR nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1327.004894] env[61868]: DEBUG oslo_vmware.rw_handles [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1327.005068] env[61868]: DEBUG oslo_vmware.rw_handles [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1327.039101] env[61868]: WARNING nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Could not clean up failed build, not rescheduling. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
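[editor's note] The two build failures above share one root cause: the compute service's Keystone credentials stopped being accepted mid-run, so both the Glance image GET and the Neutron port listing came back HTTP 401, and Nova's client wrappers re-raised them as the typed exceptions seen in these traces (ImageNotAuthorized, NeutronAdminCredentialConfigurationInvalid). A minimal sketch of that translation pattern, using illustrative stand-in names rather than Nova's actual helpers:

import functools

class ClientUnauthorized(Exception):
    # Stand-in for glanceclient.exc.HTTPUnauthorized / neutronclient's Unauthorized.
    pass

class ImageNotAuthorized(Exception):
    def __init__(self, image_id):
        super().__init__("Not authorized for image %s." % image_id)

def translate_unauthorized(to_exc):
    # Catch the client-level 401 and raise the domain exception, chained so
    # the log shows both tracebacks, as in the paired traces above.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(image_id):
            try:
                return func(image_id)
            except ClientUnauthorized as exc:
                raise to_exc(image_id) from exc
        return wrapper
    return decorator

@translate_unauthorized(ImageNotAuthorized)
def show(image_id):
    # A real client would issue GET /v2/images/{image_id} here; with an
    # expired token the server answers 401.
    raise ClientUnauthorized("HTTP 401 Unauthorized")

Calling show('790b1826-10c3-4b26-ad5d-ce8b36354025') raises ImageNotAuthorized chained to the original 401, which is exactly the "During handling of the above exception, another exception occurred" shape in the first traceback.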
[ 1327.039350] env[61868]: DEBUG nova.compute.claims [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1327.039511] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1327.069413] env[61868]: DEBUG nova.network.neutron [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.081490] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Releasing lock "refresh_cache-972ab1c7-03b0-4294-930c-8084674083ba" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1327.082146] env[61868]: DEBUG nova.compute.manager [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1327.082452] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1327.083062] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9614343d-dd75-473a-8e05-01578c040675 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.095465] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e06b6c4-05ce-427f-bc6b-3ef5aa083178 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.126040] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 972ab1c7-03b0-4294-930c-8084674083ba could not be found. 
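[editor's note] The InstanceNotFound warning above shows delete being treated as idempotent: when the VM is already gone from the backend, vmops logs the condition and teardown continues to network and claim cleanup instead of failing. A sketch of that behaviour, assuming hypothetical backend and log objects:

class InstanceNotFound(Exception):
    pass

def destroy_instance(instance_uuid, backend, log):
    # Unregister the VM and remove its files, but tolerate a VM that no
    # longer exists; a repeated or raced delete must still succeed.
    try:
        backend.unregister_vm(instance_uuid)
        backend.delete_datastore_files(instance_uuid)
    except InstanceNotFound:
        log.warning("Instance does not exist on backend: %s", instance_uuid)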
[ 1327.126419] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1327.126699] env[61868]: INFO nova.compute.manager [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1327.127064] env[61868]: DEBUG oslo.service.loopingcall [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.130130] env[61868]: DEBUG nova.compute.manager [-] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1327.130347] env[61868]: DEBUG nova.network.neutron [-] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1327.152218] env[61868]: DEBUG nova.network.neutron [-] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1327.161362] env[61868]: DEBUG nova.network.neutron [-] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.170116] env[61868]: INFO nova.compute.manager [-] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] Took 0.04 seconds to deallocate network for instance. [ 1327.275062] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9aac3b3d-511a-4120-b574-64f985810788 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "972ab1c7-03b0-4294-930c-8084674083ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.433s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1327.276469] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "972ab1c7-03b0-4294-930c-8084674083ba" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 177.617s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1327.276656] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 972ab1c7-03b0-4294-930c-8084674083ba] During sync_power_state the instance has a pending task (deleting). Skip.
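[editor's note] The "pending task (deleting). Skip." record documents a guard in the periodic power-state sync: an instance with an in-flight task is left untouched so the sync cannot race the operation that owns it. Roughly, with illustrative names (this is not Nova's exact code):

def query_driver_power_state_and_sync(instance, driver_power_state, db, log):
    # Skip reconciliation while another operation holds the task state.
    if instance.task_state is not None:
        log.info("During sync_power_state the instance has a pending task "
                 "(%s). Skip.", instance.task_state)
        return
    if instance.power_state != driver_power_state:
        # Only now is it safe to write the hypervisor's view back to the DB.
        db.update_instance(instance.uuid, power_state=driver_power_state)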
[ 1327.276941] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "972ab1c7-03b0-4294-930c-8084674083ba" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1327.308257] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9ae35b-1083-4b8b-9b60-fc503cb1943b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.316071] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f5c463-c071-46bd-bcfa-e5031e652cad {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.350770] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5aa4bc-ffee-4721-a653-d27662aee2be {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.359604] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641203b6-722a-4695-bc43-9edf3ac0f52b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.373385] env[61868]: DEBUG nova.compute.provider_tree [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.382308] env[61868]: DEBUG nova.scheduler.client.report [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1327.399842] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.475s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1327.400372] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Start building networks asynchronously for instance.
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1327.403338] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.364s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1327.436445] env[61868]: DEBUG nova.compute.utils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1327.440579] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1327.440740] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1327.446334] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1327.473159] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] No network configured {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1188}} [ 1327.473337] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance network_info: |[]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1327.521292] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1327.545375] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1327.545652] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1327.545811] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.546000] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1327.546447] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.546621] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1327.546833] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1327.546997] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1327.547160] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 
tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1327.547323] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1327.547498] env[61868]: DEBUG nova.virt.hardware [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1327.548397] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a0096e-deda-4b26-898a-8d5ec19b87db {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.557934] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7a71c0-c99c-4972-b6c9-719d85571917 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.571881] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.577315] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Creating folder: Project (90a2574d01974d66bfeac6a84b08f27d). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1327.579956] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76c55fc3-e706-41c1-b89d-2b563d61517b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.592876] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Created folder: Project (90a2574d01974d66bfeac6a84b08f27d) in parent group-v18181. [ 1327.593092] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Creating folder: Instances. Parent ref: group-v18273. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1327.595688] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8c70231-d1dc-43c5-bec6-4d91bc7bf655 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.605393] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Created folder: Instances in parent group-v18273. [ 1327.605648] env[61868]: DEBUG oslo.service.loopingcall [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.605842] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1327.606051] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfed2efb-c6aa-46ff-b702-e8f60bfbbbfd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.634149] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1327.634149] env[61868]: value = "task-41101" [ 1327.634149] env[61868]: _type = "Task" [ 1327.634149] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.642648] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41101, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.728254] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440d134f-5ecf-42e1-b754-9fb479d01544 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.736272] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c298d61d-1a4a-424b-9df1-f78185220b5a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.769118] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b11181-67f9-4aca-bd00-ea680ca0c560 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.777087] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f4c5be-d968-4dd1-a7f1-4347ce643a56 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.793000] env[61868]: DEBUG nova.compute.provider_tree [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.803130] env[61868]: DEBUG nova.scheduler.client.report [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1327.823335] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.420s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1327.823636] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Build of instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de aborted: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2661}} [ 1327.824520] env[61868]: DEBUG nova.compute.utils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Build of instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de aborted: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1327.826492] env[61868]: ERROR nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Build of instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de aborted: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025.: nova.exception.BuildAbortException: Build of instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de aborted: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. [ 1327.826715] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1327.826967] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1327.827134] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1327.827314] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1327.854798] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1327.888074] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.898243] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1327.898442] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1327.898669] env[61868]: DEBUG nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1327.898879] env[61868]: DEBUG nova.network.neutron [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1327.981730] env[61868]: DEBUG neutronclient.v2_0.client [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61868) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1327.983387] env[61868]: ERROR nova.compute.manager [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] exception_handler_v20(status_code, error_body) [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise client_exc(message=error_message, [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Neutron server returns request_ids: ['req-1722b31a-355a-4e65-b5df-a3d1faa6db99'] [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] During handling of the above exception, another exception occurred: [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2902, in _build_resources [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._shutdown_instance(context, instance, [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._try_deallocate_network(context, instance, requested_networks) [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] with excutils.save_and_reraise_exception(): [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.force_reraise() [ 1327.983387] env[61868]: ERROR nova.compute.manager 
[instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise self.value [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] _deallocate_network_with_retries() [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return evt.wait() [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = hub.switch() [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.greenlet.switch() [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = func(*self.args, **self.kw) [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = f(*args, **kwargs) [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1327.983387] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._deallocate_network( [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.network_api.deallocate_for_instance( [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] data = neutron.list_ports(**search_opts) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = 
obj(*args, **kwargs) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.list('ports', self.ports_path, retrieve_all, [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] for r in self._pagination(collection, path, **params): [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] res = self.get(path, params=params) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.retry_request("GET", action, body=body, [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.do_request(method, action, body=body, [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._handle_fault_response(status_code, replybody, resp) [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] During handling of the above exception, another exception occurred: [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._build_and_run_instance(context, instance, image, [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2660, in _build_and_run_instance [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] with excutils.save_and_reraise_exception(): [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.984916] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.force_reraise() [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise self.value [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] with self._build_resources(context, instance, [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.gen.throw(typ, value, traceback) [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2910, in _build_resources [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise exception.BuildAbortException( [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] nova.exception.BuildAbortException: Build of instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de aborted: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. 
[ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] During handling of the above exception, another exception occurred: [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] exception_handler_v20(status_code, error_body) [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise client_exc(message=error_message, [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Neutron server returns request_ids: ['req-fb0abc0d-ac4d-4756-b3a6-9f833aac55cf'] [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] During handling of the above exception, another exception occurred: [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._deallocate_network(context, instance, requested_networks) [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.network_api.deallocate_for_instance( [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] data = neutron.list_ports(**search_opts) [ 
1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.list('ports', self.ports_path, retrieve_all, [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1327.986185] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] for r in self._pagination(collection, path, **params): [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] res = self.get(path, params=params) [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.retry_request("GET", action, body=body, [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.do_request(method, action, body=body, [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._handle_fault_response(status_code, replybody, resp) [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise exception.Unauthorized() [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] nova.exception.Unauthorized: Not authorized. [ 1327.987240] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1328.071165] env[61868]: INFO nova.scheduler.client.report [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Deleted allocations for instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de [ 1328.071525] env[61868]: DEBUG oslo_concurrency.lockutils [None req-529fcb34-cd83-42d4-942a-3861a1df198f tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 590.069s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1328.072708] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 393.937s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1328.072920] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1328.073191] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1328.073366] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1328.075205] env[61868]: INFO nova.compute.manager [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 
tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Terminating instance [ 1328.076885] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquiring lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1328.077031] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Acquired lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1328.077189] env[61868]: DEBUG nova.network.neutron [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1328.083889] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1328.111405] env[61868]: DEBUG nova.network.neutron [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1328.141620] env[61868]: DEBUG nova.network.neutron [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.147275] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1328.147493] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1328.148952] env[61868]: INFO nova.compute.claims [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1328.151866] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41101, 'name': CreateVM_Task, 'duration_secs': 0.29055} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.152808] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1328.153193] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Releasing lock "refresh_cache-d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1328.153547] env[61868]: DEBUG nova.compute.manager [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1328.153739] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1328.154121] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1328.154332] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1328.154749] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cdf7e16c-db75-4b3e-9574-dcfd82780223 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.159512] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1bbbe2-4f88-449c-995a-9d8b3a2fc646 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.177035] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a1bd4f-9dff-41da-813a-066094bb2204 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.211229] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Reconfiguring VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1328.212666] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-687589ea-97a7-4151-9b47-6e30e009a82b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.236685] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6ac9ed4-56dd-493a-8d9f-0cfad210b6de could not be found. 
[ 1328.236918] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1328.237065] env[61868]: INFO nova.compute.manager [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1328.237310] env[61868]: DEBUG oslo.service.loopingcall [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1328.238056] env[61868]: DEBUG nova.compute.manager [-] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1328.238169] env[61868]: DEBUG nova.network.neutron [-] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1328.246347] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for the task: (returnval){ [ 1328.246347] env[61868]: value = "task-41102" [ 1328.246347] env[61868]: _type = "Task" [ 1328.246347] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.257968] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Task: {'id': task-41102, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.331407] env[61868]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61868) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1328.331685] env[61868]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1328.332211] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-16240aa4-e258-4995-897c-babd93c11fa9'] [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1328.332211] env[61868]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1328.332211] env[61868]: ERROR oslo.service.loopingcall [ 1328.333684] env[61868]: ERROR nova.compute.manager [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1328.363848] env[61868]: ERROR nova.compute.manager [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
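The deallocation is driven through oslo.service's RetryDecorator (the "Dynamic interval looping call ... RetryDecorator.__call__ ... failed" line above): the decorator re-runs the wrapped function only for the exception types it is given, so a credential error outside that set escapes on the first attempt, and the traceback that follows records the whole chain again. A minimal sketch of the retry pattern, with hypothetical names (deallocate, NetworkRetryableError) standing in for Nova's internals:

    from oslo_service import loopingcall

    class NetworkRetryableError(Exception):
        """Hypothetical stand-in for the errors treated as retryable."""

    # Retry up to 3 times with a growing sleep, but only when one of the
    # listed exception types is raised; anything else propagates at once
    # and the looping call is logged as failed, as seen above.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10,
                                exceptions=(NetworkRetryableError,))
    def deallocate():
        ...  # the real code calls the network API here

    deallocate()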
[ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] exception_handler_v20(status_code, error_body) [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise client_exc(message=error_message, [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Neutron server returns request_ids: ['req-16240aa4-e258-4995-897c-babd93c11fa9'] [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] During handling of the above exception, another exception occurred: [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Traceback (most recent call last): [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._delete_instance(context, instance, bdms) [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._shutdown_instance(context, instance, bdms) [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._try_deallocate_network(context, instance, requested_networks) [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] with excutils.save_and_reraise_exception(): [ 1328.363848] env[61868]: ERROR 
nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.force_reraise() [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise self.value [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] _deallocate_network_with_retries() [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return evt.wait() [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = hub.switch() [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.greenlet.switch() [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = func(*self.args, **self.kw) [ 1328.363848] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] result = f(*args, **kwargs) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._deallocate_network( [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self.network_api.deallocate_for_instance( [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: 
d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] data = neutron.list_ports(**search_opts) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.list('ports', self.ports_path, retrieve_all, [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] for r in self._pagination(collection, path, **params): [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] res = self.get(path, params=params) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.retry_request("GET", action, body=body, [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] return self.do_request(method, action, body=body, [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] ret = obj(*args, **kwargs) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] self._handle_fault_response(status_code, replybody, resp) [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1328.365013] env[61868]: ERROR nova.compute.manager [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] [ 1328.393368] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.320s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1328.394570] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 178.735s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1328.394774] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] During sync_power_state the instance has a pending task (deleting). Skip. [ 1328.394956] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "d6ac9ed4-56dd-493a-8d9f-0cfad210b6de" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1328.455847] env[61868]: INFO nova.compute.manager [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] [instance: d6ac9ed4-56dd-493a-8d9f-0cfad210b6de] Successfully reverted task state from None on failure for instance. [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server [None req-6660c933-17c0-4b20-8de3-e646a8574d54 tempest-DeleteServersAdminTestJSON-524356288 tempest-DeleteServersAdminTestJSON-524356288-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-16240aa4-e258-4995-897c-babd93c11fa9'] [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1328.459682] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.461205] env[61868]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1328.461205] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1328.462745] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1328.462745] env[61868]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1328.462745] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1328.462745] env[61868]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1328.462745] env[61868]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
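Both of the long tracebacks end in the same two frames of nova/network/neutron.py: a wrapper around every neutronclient call catches the client's Unauthorized and re-raises it as NeutronAdminCredentialConfigurationInvalid, turning a transport-level 401 into an explicit configuration error. A simplified sketch of that translation (the exception class here is a stand-in for the real one in nova.exception):

    import functools

    from neutronclient.common import exceptions as neutron_exc

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the Nova exception raised in the tracebacks."""

    def translate_client_errors(func):
        # Sketch of the wrapper seen at nova/network/neutron.py:196/212 in
        # the frames above: a 401 on an admin-scoped call means the service
        # credentials are bad, not the end user's token.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_exc.Unauthorized:
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper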
[ 1328.462745] env[61868]: ERROR oslo_messaging.rpc.server [ 1328.501077] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e235213-99e4-406d-a672-b203328dc7bf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.509195] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2ec136-4151-437c-a81f-b22a4480133c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.541610] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4957b077-824d-4e75-a296-b313d2b04e30 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.549332] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239a0dd9-b694-45d1-ab4b-875edd5e0dd2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.562911] env[61868]: DEBUG nova.compute.provider_tree [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1328.572047] env[61868]: DEBUG nova.scheduler.client.report [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1328.590493] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.443s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1328.591079] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Start building networks asynchronously for instance. 
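The inventory dict logged just above follows Placement's usual semantics: usable capacity per resource class is (total - reserved) * allocation_ratio, so this provider can place (48 - 0) * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MB of RAM, and 400 GB of disk. A quick check of that arithmetic:

    # Effective capacity per resource class, from the inventory logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0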
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1328.625299] env[61868]: DEBUG nova.compute.utils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1328.626863] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1328.627246] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1328.639337] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1328.679995] env[61868]: DEBUG nova.policy [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4c7bd4ab5fb746f9870fdda80039e74e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8102964b3ed34e73be155f4985ba8c27', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1328.711368] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1328.732684] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1328.732926] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1328.733157] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1328.733350] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1328.733510] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1328.733744] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1328.733899] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1328.734071] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1328.734240] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 
tempest-ServerTagsTestJSON-3206530-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1328.734403] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1328.734569] env[61868]: DEBUG nova.virt.hardware [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1328.735437] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913a107e-4f42-4394-aa41-78d68ef9a2f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.744201] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe73f462-888a-4b05-a7d1-9e8a913fb3b1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.764565] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Task: {'id': task-41102, 'name': ReconfigVM_Task, 'duration_secs': 0.116625} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.764833] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Reconfigured VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1328.765045] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.611s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1328.765287] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1328.765430] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1328.765754] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 
tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1328.766028] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07626dd8-0999-4661-830f-a4db774e2360 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.770823] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for the task: (returnval){ [ 1328.770823] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]522e0ee9-d840-c77d-fe70-9c61d1a99d58" [ 1328.770823] env[61868]: _type = "Task" [ 1328.770823] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.787129] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1328.787391] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1328.787710] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1328.960427] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Successfully created port: 39122a3d-a49b-4abc-abfc-aa7bec83e93c {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1329.601267] env[61868]: DEBUG nova.compute.manager [req-04361520-d256-469a-9770-7aa767a3982d req-8af82ae5-c8f4-40ea-8437-fd29ced6b14b service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Received event network-vif-plugged-39122a3d-a49b-4abc-abfc-aa7bec83e93c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1329.601521] env[61868]: DEBUG oslo_concurrency.lockutils [req-04361520-d256-469a-9770-7aa767a3982d req-8af82ae5-c8f4-40ea-8437-fd29ced6b14b service nova] Acquiring lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1329.601688] env[61868]: DEBUG oslo_concurrency.lockutils 
[req-04361520-d256-469a-9770-7aa767a3982d req-8af82ae5-c8f4-40ea-8437-fd29ced6b14b service nova] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1329.601942] env[61868]: DEBUG oslo_concurrency.lockutils [req-04361520-d256-469a-9770-7aa767a3982d req-8af82ae5-c8f4-40ea-8437-fd29ced6b14b service nova] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1329.602124] env[61868]: DEBUG nova.compute.manager [req-04361520-d256-469a-9770-7aa767a3982d req-8af82ae5-c8f4-40ea-8437-fd29ced6b14b service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] No waiting events found dispatching network-vif-plugged-39122a3d-a49b-4abc-abfc-aa7bec83e93c {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1329.602291] env[61868]: WARNING nova.compute.manager [req-04361520-d256-469a-9770-7aa767a3982d req-8af82ae5-c8f4-40ea-8437-fd29ced6b14b service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Received unexpected event network-vif-plugged-39122a3d-a49b-4abc-abfc-aa7bec83e93c for instance with vm_state building and task_state spawning. [ 1329.675484] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Successfully updated port: 39122a3d-a49b-4abc-abfc-aa7bec83e93c {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1329.688187] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "refresh_cache-efb3f108-d3b3-4ebf-a51f-84dc8274f857" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1329.688357] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquired lock "refresh_cache-efb3f108-d3b3-4ebf-a51f-84dc8274f857" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1329.688511] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1329.728134] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1329.886415] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Updating instance_info_cache with network_info: [{"id": "39122a3d-a49b-4abc-abfc-aa7bec83e93c", "address": "fa:16:3e:2b:c8:85", "network": {"id": "955180e4-721e-46f1-b312-7ab0bb3b8803", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1887202865-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "8102964b3ed34e73be155f4985ba8c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39122a3d-a4", "ovs_interfaceid": "39122a3d-a49b-4abc-abfc-aa7bec83e93c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.899544] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Releasing lock "refresh_cache-efb3f108-d3b3-4ebf-a51f-84dc8274f857" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1329.899845] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Instance network_info: |[{"id": "39122a3d-a49b-4abc-abfc-aa7bec83e93c", "address": "fa:16:3e:2b:c8:85", "network": {"id": "955180e4-721e-46f1-b312-7ab0bb3b8803", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1887202865-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "8102964b3ed34e73be155f4985ba8c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39122a3d-a4", "ovs_interfaceid": "39122a3d-a49b-4abc-abfc-aa7bec83e93c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1329.900297] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:c8:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39122a3d-a49b-4abc-abfc-aa7bec83e93c', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.907687] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Creating folder: Project (8102964b3ed34e73be155f4985ba8c27). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1329.908779] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-585d9314-063f-4280-ae6c-b956fb4fbe3b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.910773] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "c9f74904-0558-42e6-a454-c7103b2873b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1329.920571] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Created folder: Project (8102964b3ed34e73be155f4985ba8c27) in parent group-v18181. [ 1329.920781] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Creating folder: Instances. Parent ref: group-v18276. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1329.921056] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b04b58f-1dde-4dfd-8196-b0cfa86d1448 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.931150] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Created folder: Instances in parent group-v18276. [ 1329.931407] env[61868]: DEBUG oslo.service.loopingcall [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
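The folder-creation and CreateVM_Task calls around here (task-41105 below) follow oslo.vmware's standard task pattern: invoke_api() issues the vSphere SOAP call and returns a task moref, and the session then polls it (the "progress is N%" debug lines) until it reaches a terminal state. A minimal sketch, with placeholder vCenter credentials and with folder_ref, config_spec and res_pool_ref standing in for managed-object references built elsewhere:

    from oslo_vmware import api

    # Placeholder connection values; the log above talks to a real vCenter.
    session = api.VMwareAPISession(
        'vc.example.test', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical morefs/specs; in Nova these come from vm_util helpers.
    folder_ref = config_spec = res_pool_ref = None

    # invoke_api returns the Task moref; wait_for_task polls it and raises
    # if the task finishes in an error state.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=res_pool_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)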
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.931604] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1329.931838] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa336548-483b-41d7-afc6-dfb0ab321387 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.952494] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.952494] env[61868]: value = "task-41105" [ 1329.952494] env[61868]: _type = "Task" [ 1329.952494] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.961618] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41105, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.462350] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41105, 'name': CreateVM_Task, 'duration_secs': 0.31514} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.462576] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1330.463069] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1330.463309] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1330.466121] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2037bd0-1f09-415a-8ebf-21a38488ee41 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.498718] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Reconfiguring VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1330.499093] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddd885d1-9f2f-480a-ba96-f9315cc1436c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.515445] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Waiting for the task: 
(returnval){ [ 1330.515445] env[61868]: value = "task-41106" [ 1330.515445] env[61868]: _type = "Task" [ 1330.515445] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.524128] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Task: {'id': task-41106, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.026084] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Task: {'id': task-41106, 'name': ReconfigVM_Task, 'duration_secs': 0.111264} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.026432] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Reconfigured VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1331.026565] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.563s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1331.026808] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1331.026946] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1331.027265] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1331.027549] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d461e0b6-dd0f-4133-894d-c03fac041c4e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.032523] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Waiting for the task: (returnval){ [ 1331.032523] env[61868]: value = 
"session[523e2a88-3d56-7540-5a68-121a3b0ef814]524971b7-346c-dd9e-44f6-0abb52b8004e" [ 1331.032523] env[61868]: _type = "Task" [ 1331.032523] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.040880] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]524971b7-346c-dd9e-44f6-0abb52b8004e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.543272] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1331.543526] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.543737] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1331.680692] env[61868]: DEBUG nova.compute.manager [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Received event network-changed-39122a3d-a49b-4abc-abfc-aa7bec83e93c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1331.680950] env[61868]: DEBUG nova.compute.manager [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Refreshing instance network info cache due to event network-changed-39122a3d-a49b-4abc-abfc-aa7bec83e93c. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1331.681171] env[61868]: DEBUG oslo_concurrency.lockutils [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] Acquiring lock "refresh_cache-efb3f108-d3b3-4ebf-a51f-84dc8274f857" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1331.681316] env[61868]: DEBUG oslo_concurrency.lockutils [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] Acquired lock "refresh_cache-efb3f108-d3b3-4ebf-a51f-84dc8274f857" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1331.681477] env[61868]: DEBUG nova.network.neutron [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Refreshing network info cache for port 39122a3d-a49b-4abc-abfc-aa7bec83e93c {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1332.259814] env[61868]: DEBUG nova.network.neutron [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Updated VIF entry in instance network info cache for port 39122a3d-a49b-4abc-abfc-aa7bec83e93c. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1332.260206] env[61868]: DEBUG nova.network.neutron [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Updating instance_info_cache with network_info: [{"id": "39122a3d-a49b-4abc-abfc-aa7bec83e93c", "address": "fa:16:3e:2b:c8:85", "network": {"id": "955180e4-721e-46f1-b312-7ab0bb3b8803", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1887202865-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "8102964b3ed34e73be155f4985ba8c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39122a3d-a4", "ovs_interfaceid": "39122a3d-a49b-4abc-abfc-aa7bec83e93c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.269941] env[61868]: DEBUG oslo_concurrency.lockutils [req-c48c1468-5041-406b-ba61-c63ef410e3ea req-25694227-ab49-485c-be53-79c35343f529 service nova] Releasing lock "refresh_cache-efb3f108-d3b3-4ebf-a51f-84dc8274f857" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1336.236521] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock 
"d69cb3f5-b385-432a-b562-87d0b1b0877b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1336.236521] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1336.878961] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81602054-c433-4e16-bd4f-521214876c1f tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "352b5eae-b809-4cb5-8970-ebad9fba78bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1336.879301] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81602054-c433-4e16-bd4f-521214876c1f tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "352b5eae-b809-4cb5-8970-ebad9fba78bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1366.351721] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.352501] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.352937] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1371.352937] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1371.373664] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.373827] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.373959] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374090] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374213] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374336] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374455] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374572] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374687] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374802] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1371.374921] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1372.369733] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.352018] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.353479] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.346754] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.369771] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.377785] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.387221] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1374.387600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1374.388367] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1374.388684] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1374.389886] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c519d3b-c94d-4613-b16a-5e76604fee06 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.399816] env[61868]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144c4fb0-1f6c-4c0d-9804-99510a47f8e0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.418268] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02d5e64-286e-4032-8cc5-f1d3045daa56 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.425937] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20227c20-33b5-470c-9a40-051805c0b859 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.457901] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181923MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1374.458073] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1374.458248] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1374.529327] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.529496] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 26f77431-9a5d-444d-b345-10108c34b59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.529629] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.529751] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.529872] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.529991] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.530108] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.530223] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.530338] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.530451] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1374.541807] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.552698] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.562529] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 185855b4-f378-45b6-9603-081b3c1b2c71 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.572514] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c578516-d2b7-4b5e-aaac-5831d2262c44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.582224] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 189f5d4f-7a0e-4d49-a0c9-04e886b35383 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.591599] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8e01cf72-2e56-493d-8723-2e51398a7697 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.602076] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.615904] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.628677] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 352b5eae-b809-4cb5-8970-ebad9fba78bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1374.628963] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1374.629114] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1374.891932] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11aa7224-0dd9-4f4c-a244-1e52b91a1d9e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.899680] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04034bf6-f316-4781-a6cb-759ea1a3e641 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.930978] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0675d5d1-ddba-4963-bb9d-a865ef9b17f0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.938814] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315edfc1-0b66-4987-8721-dd2c956f0062 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.952170] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.960910] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1374.979148] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1374.979354] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.521s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1375.671094] env[61868]: WARNING oslo_vmware.rw_handles [None 
req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1375.671094] env[61868]: ERROR oslo_vmware.rw_handles [ 1375.671808] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1375.673635] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1375.673921] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Copying Virtual Disk [datastore2] vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/3523d263-4c82-4ed3-b0fe-23adf1708e7e/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1375.674303] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cd22f27-e191-4da1-8994-d7b9f5798fcf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.686388] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Waiting for the task: (returnval){ [ 1375.686388] env[61868]: value = "task-41107" [ 1375.686388] env[61868]: _type = "Task" [ 1375.686388] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.699337] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Task: {'id': task-41107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.198537] env[61868]: DEBUG oslo_vmware.exceptions [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1376.198884] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1376.199467] env[61868]: ERROR nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1376.199467] env[61868]: Faults: ['InvalidArgument'] [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Traceback (most recent call last): [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] yield resources [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self.driver.spawn(context, instance, image_meta, [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self._fetch_image_if_missing(context, vi) [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] image_cache(vi, tmp_image_ds_loc) [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] vm_util.copy_virtual_disk( [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] session._wait_for_task(vmdk_copy_task) [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] return self.wait_for_task(task_ref) [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] return evt.wait() [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] result = hub.switch() [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] return self.greenlet.switch() [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self.f(*self.args, **self.kw) [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] raise exceptions.translate_fault(task_info.error) [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Faults: ['InvalidArgument'] [ 1376.199467] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] [ 1376.201944] env[61868]: INFO nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Terminating instance [ 1376.201944] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1376.201944] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.201944] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-271d354b-5282-43ff-9064-530e10a959d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.204225] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1376.204417] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1376.205187] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810822cd-8715-44d7-af84-ce6c9379fb74 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.214742] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1376.215028] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c66dd71a-7c49-44dd-8a0f-b12251204392 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.218159] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.218340] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1376.219539] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-838898ed-e32c-4385-b8f3-27a4c2ffee53 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.225698] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1376.225698] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]528cf891-38ba-2712-2c67-4a9845feb985" [ 1376.225698] env[61868]: _type = "Task" [ 1376.225698] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.234845] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]528cf891-38ba-2712-2c67-4a9845feb985, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.337303] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1376.337616] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1376.338100] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Deleting the datastore file [datastore2] 3394162c-605f-40a1-9dc8-dc5cba6a083f {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1376.338231] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10392f23-14f8-4f97-b55e-f73c5d9bb0eb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.347262] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Waiting for the task: (returnval){ [ 1376.347262] env[61868]: value = "task-41109" [ 1376.347262] env[61868]: _type = "Task" [ 1376.347262] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.356954] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Task: {'id': task-41109, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.736380] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1376.736705] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.736912] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c07834a-4a65-4380-9c6b-ce4efd9c53ef {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.756975] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.756975] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Fetch image to [datastore2] vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1376.757172] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1376.757895] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cf97e6-f1ea-482a-819e-87b7772a1f25 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.766327] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a129fa-a58f-4b1f-8f70-496c3c666e67 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.778597] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd892d3-8c1c-4875-b253-9dd88bb78cb7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.810475] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b56b7f6-66ec-45e4-9a02-9d0e1cb017a3 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.817775] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b101d473-1808-4d02-8a9a-cbcf91f1f32e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.845720] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1376.857645] env[61868]: DEBUG oslo_vmware.api [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Task: {'id': task-41109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089279} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.857853] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1376.858099] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1376.858299] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1376.858476] env[61868]: INFO nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Took 0.65 seconds to destroy the instance on the hypervisor. 
[ 1376.861191] env[61868]: DEBUG nova.compute.claims [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1376.861393] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1376.861653] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1376.910961] env[61868]: DEBUG oslo_vmware.rw_handles [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1376.986613] env[61868]: DEBUG oslo_vmware.rw_handles [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1376.986613] env[61868]: DEBUG oslo_vmware.rw_handles [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1377.279239] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f776fe-1af9-4a04-800e-650393074d02 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.288035] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d539d41f-0964-4b73-85f7-832e2a5586c5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.321201] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912405cd-34ac-4ad6-a523-5c4a65130078 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.329493] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9eb127-df60-43d7-9b5c-ea82196461f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.350433] env[61868]: DEBUG nova.compute.provider_tree [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.360458] env[61868]: DEBUG nova.scheduler.client.report [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1377.382622] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.521s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1377.383195] env[61868]: ERROR nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1377.383195] env[61868]: Faults: ['InvalidArgument'] [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Traceback (most recent call last): [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1377.383195] env[61868]: ERROR nova.compute.manager 
[instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self.driver.spawn(context, instance, image_meta, [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self._fetch_image_if_missing(context, vi) [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] image_cache(vi, tmp_image_ds_loc) [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] vm_util.copy_virtual_disk( [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] session._wait_for_task(vmdk_copy_task) [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] return self.wait_for_task(task_ref) [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] return evt.wait() [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] result = hub.switch() [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] return self.greenlet.switch() [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] self.f(*self.args, **self.kw) [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] raise exceptions.translate_fault(task_info.error) [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Faults: ['InvalidArgument'] [ 1377.383195] env[61868]: ERROR nova.compute.manager [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] [ 1377.384031] env[61868]: DEBUG nova.compute.utils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1377.385480] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Build of instance 3394162c-605f-40a1-9dc8-dc5cba6a083f was re-scheduled: A specified parameter was not correct: fileType [ 1377.385480] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1377.385859] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1377.386035] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1377.386195] env[61868]: DEBUG nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1377.386360] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1378.084014] env[61868]: DEBUG nova.network.neutron [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.099966] env[61868]: INFO nova.compute.manager [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Took 0.71 seconds to deallocate network for instance. [ 1378.214785] env[61868]: INFO nova.scheduler.client.report [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Deleted allocations for instance 3394162c-605f-40a1-9dc8-dc5cba6a083f [ 1378.245646] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f939953-a7cb-4e7b-b906-184203fbc784 tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 562.372s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1378.247093] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 364.636s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1378.247371] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Acquiring lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1378.247624] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1378.247804] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1378.250576] env[61868]: INFO nova.compute.manager [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Terminating instance [ 1378.256326] env[61868]: DEBUG nova.compute.manager [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1378.256724] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1378.257291] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-859cbe40-e485-49ac-ac3e-7d4b8bbe4803 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.262057] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1378.272089] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de74d083-e05a-4ee5-abe8-7195aad28d83 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.305272] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3394162c-605f-40a1-9dc8-dc5cba6a083f could not be found. [ 1378.305832] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1378.306177] env[61868]: INFO nova.compute.manager [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1378.306550] env[61868]: DEBUG oslo.service.loopingcall [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.309796] env[61868]: DEBUG nova.compute.manager [-] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1378.310041] env[61868]: DEBUG nova.network.neutron [-] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1378.325494] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1378.326111] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1378.327699] env[61868]: INFO nova.compute.claims [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1378.341238] env[61868]: DEBUG nova.network.neutron [-] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.359042] env[61868]: INFO nova.compute.manager [-] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] Took 0.05 seconds to deallocate network for instance. [ 1378.459141] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c7de2917-7b6a-4274-a6c0-456ec5d94c4f tempest-ServersTestMultiNic-969860196 tempest-ServersTestMultiNic-969860196-project-member] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.212s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1378.459986] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 228.800s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1378.460287] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 3394162c-605f-40a1-9dc8-dc5cba6a083f] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1378.460468] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "3394162c-605f-40a1-9dc8-dc5cba6a083f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1378.647239] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fa1273-1ae7-4aaf-a937-9c2cda9452c3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.656015] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb37e19-6511-4829-8c62-cc7ba0d7d322 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.687950] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa30732-6596-4ad5-90d7-e16f64a0d1cf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.697163] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005fca51-5fbc-43fd-8b22-0bfc0000f3f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.712322] env[61868]: DEBUG nova.compute.provider_tree [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.721616] env[61868]: DEBUG nova.scheduler.client.report [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1378.740160] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.414s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1378.740622] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1378.779306] env[61868]: DEBUG nova.compute.utils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1378.781173] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1378.781341] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1378.795624] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1378.871804] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1378.882205] env[61868]: DEBUG nova.policy [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74f15b527146bb9bc726e54d220a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d5fac165e449d49cd6e9d9c7e9d116', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1378.909861] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=<?>,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=<?>,size=50659328,status='active',tags=<?>,updated_at=2024-02-13T12:42:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 1378.910300] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1378.910667] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1378.910985] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1378.911343] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1378.911593] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1378.911944] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1378.912242] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1378.912536] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1378.912810] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1378.913092] env[61868]: DEBUG nova.virt.hardware [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1378.914060] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30116879-6257-44e4-91ad-3d28ad67ff69 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.924666] env[61868]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49130eed-ff8c-4be8-a810-6ad39567ed9c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.616841] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Successfully created port: e00269ab-f36b-459f-ba1b-6468a5dd675c {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.953552] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.953765] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1380.351429] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.779113] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Successfully updated port: e00269ab-f36b-459f-ba1b-6468a5dd675c {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1380.790857] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "refresh_cache-ebd4070e-7944-4d2f-8668-01d0ceca0c67" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1380.791125] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "refresh_cache-ebd4070e-7944-4d2f-8668-01d0ceca0c67" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1380.791289] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1380.869100] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1381.127966] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Updating instance_info_cache with network_info: [{"id": "e00269ab-f36b-459f-ba1b-6468a5dd675c", "address": "fa:16:3e:e3:32:d7", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape00269ab-f3", "ovs_interfaceid": "e00269ab-f36b-459f-ba1b-6468a5dd675c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.152941] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "refresh_cache-ebd4070e-7944-4d2f-8668-01d0ceca0c67" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1381.153251] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Instance network_info: |[{"id": "e00269ab-f36b-459f-ba1b-6468a5dd675c", "address": "fa:16:3e:e3:32:d7", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape00269ab-f3", "ovs_interfaceid": "e00269ab-f36b-459f-ba1b-6468a5dd675c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1381.153957] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd 
tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:32:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e00269ab-f36b-459f-ba1b-6468a5dd675c', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1381.168532] env[61868]: DEBUG oslo.service.loopingcall [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.169129] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1381.169790] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f4e59ac-9f52-4004-adf1-802b687cd111 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.193898] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1381.193898] env[61868]: value = "task-41110" [ 1381.193898] env[61868]: _type = "Task" [ 1381.193898] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.207415] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41110, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.705266] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41110, 'name': CreateVM_Task, 'duration_secs': 0.352391} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.705431] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1381.706046] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1381.706263] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1381.710572] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd5fbd0-e6d9-4917-981a-89fae6502191 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.753626] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Reconfiguring VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1381.754027] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cbec880-b344-4204-a58e-0a12d5e36e06 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.772180] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1381.772180] env[61868]: value = "task-41111" [ 1381.772180] env[61868]: _type = "Task" [ 1381.772180] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.783118] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41111, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.951843] env[61868]: DEBUG nova.compute.manager [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Received event network-vif-plugged-e00269ab-f36b-459f-ba1b-6468a5dd675c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1381.951843] env[61868]: DEBUG oslo_concurrency.lockutils [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] Acquiring lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1381.951843] env[61868]: DEBUG oslo_concurrency.lockutils [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1381.951843] env[61868]: DEBUG oslo_concurrency.lockutils [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1381.951843] env[61868]: DEBUG nova.compute.manager [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] No waiting events found dispatching network-vif-plugged-e00269ab-f36b-459f-ba1b-6468a5dd675c {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1381.952076] env[61868]: WARNING nova.compute.manager [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Received unexpected event network-vif-plugged-e00269ab-f36b-459f-ba1b-6468a5dd675c for instance with vm_state building and task_state spawning. [ 1381.952260] env[61868]: DEBUG nova.compute.manager [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Received event network-changed-e00269ab-f36b-459f-ba1b-6468a5dd675c {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1381.952418] env[61868]: DEBUG nova.compute.manager [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Refreshing instance network info cache due to event network-changed-e00269ab-f36b-459f-ba1b-6468a5dd675c. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1381.952599] env[61868]: DEBUG oslo_concurrency.lockutils [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] Acquiring lock "refresh_cache-ebd4070e-7944-4d2f-8668-01d0ceca0c67" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1381.952734] env[61868]: DEBUG oslo_concurrency.lockutils [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] Acquired lock "refresh_cache-ebd4070e-7944-4d2f-8668-01d0ceca0c67" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1381.952888] env[61868]: DEBUG nova.network.neutron [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Refreshing network info cache for port e00269ab-f36b-459f-ba1b-6468a5dd675c {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1382.216778] env[61868]: DEBUG nova.network.neutron [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Updated VIF entry in instance network info cache for port e00269ab-f36b-459f-ba1b-6468a5dd675c. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1382.217131] env[61868]: DEBUG nova.network.neutron [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Updating instance_info_cache with network_info: [{"id": "e00269ab-f36b-459f-ba1b-6468a5dd675c", "address": "fa:16:3e:e3:32:d7", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape00269ab-f3", "ovs_interfaceid": "e00269ab-f36b-459f-ba1b-6468a5dd675c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.228234] env[61868]: DEBUG oslo_concurrency.lockutils [req-a1a73e33-5012-4950-ab4f-dc7ff202f264 req-c6919c87-4a07-4ad1-b3c7-e3623f606237 service nova] Releasing lock "refresh_cache-ebd4070e-7944-4d2f-8668-01d0ceca0c67" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1382.283516] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41111, 'name': ReconfigVM_Task, 'duration_secs': 0.112352} 
completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.283840] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Reconfigured VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1382.284118] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.578s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1382.284300] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1382.284447] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1382.284769] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1382.285048] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd9f50b6-0ecc-40c3-b3fd-a3c588a67d9f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.290370] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1382.290370] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5215bd82-c2d4-dac3-0c4c-cab0af4e2d55" [ 1382.290370] env[61868]: _type = "Task" [ 1382.290370] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.302092] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5215bd82-c2d4-dac3-0c4c-cab0af4e2d55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.425868] env[61868]: DEBUG oslo_concurrency.lockutils [None req-22f999bb-c19e-430e-97c1-138316b1186d tempest-ServerPasswordTestJSON-936178413 tempest-ServerPasswordTestJSON-936178413-project-member] Acquiring lock "11a56664-d4bc-4090-8de0-52cfeb8f37e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1382.426244] env[61868]: DEBUG oslo_concurrency.lockutils [None req-22f999bb-c19e-430e-97c1-138316b1186d tempest-ServerPasswordTestJSON-936178413 tempest-ServerPasswordTestJSON-936178413-project-member] Lock "11a56664-d4bc-4090-8de0-52cfeb8f37e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1382.801341] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1382.801629] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1382.801768] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1395.027526] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "97840d8b-90ee-432e-988a-30548b61381b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1395.027838] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "97840d8b-90ee-432e-988a-30548b61381b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1395.203054] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2a9fdf4d-83d0-43c8-ac33-d6af08d4e0d1 tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "51d7ab97-1a1c-49e6-a989-cb1b49e30d2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1395.203290] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2a9fdf4d-83d0-43c8-ac33-d6af08d4e0d1 tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "51d7ab97-1a1c-49e6-a989-cb1b49e30d2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1395.800674] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a7b2802f-e138-4b2d-80c5-6dbe5aadd44d tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "f92ea472-0354-45a7-a29a-569faaf460f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1395.801399] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a7b2802f-e138-4b2d-80c5-6dbe5aadd44d tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "f92ea472-0354-45a7-a29a-569faaf460f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1414.942973] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1419.707502] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1423.352136] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1423.352545] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1423.366146] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] There are 1 instances to clean {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1423.366473] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 16eb032d-fe34-4a46-883c-8b937806d63f] Instance has had 0 of 5 cleanup attempts {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11232}} [ 1423.413965] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task 
ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.984139] env[61868]: WARNING oslo_vmware.rw_handles [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1424.984139] env[61868]: ERROR oslo_vmware.rw_handles [ 1424.985025] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1424.986591] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1424.986855] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Copying Virtual Disk [datastore2] vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/8eb25595-bb26-4347-9608-a07ad344e4bb/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1424.987196] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c80d3c40-4b57-495a-b1f5-b996f620056a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.995954] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for 
the task: (returnval){ [ 1424.995954] env[61868]: value = "task-41112" [ 1424.995954] env[61868]: _type = "Task" [ 1424.995954] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.005062] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.506206] env[61868]: DEBUG oslo_vmware.exceptions [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1425.506498] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1425.507187] env[61868]: ERROR nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.507187] env[61868]: Faults: ['InvalidArgument'] [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Traceback (most recent call last): [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] yield resources [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self.driver.spawn(context, instance, image_meta, [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self._fetch_image_if_missing(context, vi) [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] image_cache(vi, tmp_image_ds_loc) [ 
1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] vm_util.copy_virtual_disk( [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] session._wait_for_task(vmdk_copy_task) [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] return self.wait_for_task(task_ref) [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] return evt.wait() [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] result = hub.switch() [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] return self.greenlet.switch() [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self.f(*self.args, **self.kw) [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] raise exceptions.translate_fault(task_info.error) [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Faults: ['InvalidArgument'] [ 1425.507187] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] [ 1425.508110] env[61868]: INFO nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Terminating instance [ 1425.509111] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1425.509317] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.509564] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94aec813-244f-43f0-813e-de5c62165154 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.511831] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1425.512075] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1425.512801] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4ae5fc-4877-44eb-b75e-09ecc084b36d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.520017] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1425.520276] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11241bdd-bd98-4c55-9313-1135bf146e8c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.522592] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.522767] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1425.523757] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5634f4bd-d3ca-4fec-874f-f5bf070cdd05 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.528807] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Waiting for the task: (returnval){ [ 1425.528807] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]527ae465-e664-c822-fa59-0ada9bee9332" [ 1425.528807] env[61868]: _type = "Task" [ 1425.528807] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.537713] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]527ae465-e664-c822-fa59-0ada9bee9332, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.592679] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1425.593194] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1425.593530] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleting the datastore file [datastore2] 26f77431-9a5d-444d-b345-10108c34b59b {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.593914] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-912e7e82-6dc5-4ba0-aa59-6b0794d9789b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.601405] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1425.601405] env[61868]: value = "task-41114" [ 1425.601405] env[61868]: _type = "Task" [ 1425.601405] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.611487] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.039840] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1426.040540] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Creating directory with path [datastore2] vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1426.040927] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7c0b441-a352-40f6-901e-5b965baeb1c3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.054056] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Created directory with path [datastore2] vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1426.054483] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Fetch image to [datastore2] vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1426.054820] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1426.055744] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3ca7e9-e9a5-479a-a7a8-5495e354973f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.063292] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e5d13e-2588-4baf-9a34-707636c1d063 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.073883] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bd5d91-bf0b-4145-ac09-a708fb362904 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.114194] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0d9ef4d2-d7d3-467d-b5da-4597a3a68bb7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.124341] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-95764caa-dee1-4cf6-b6d5-82246da193aa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.126550] env[61868]: DEBUG oslo_vmware.api [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092363} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.126993] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.127299] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1426.127625] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1426.127926] env[61868]: INFO nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1426.130406] env[61868]: DEBUG nova.compute.claims [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 1426.130724] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1426.131108] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1426.151565] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1426.204830] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1426.258876] env[61868]: DEBUG nova.scheduler.client.report [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Refreshing inventories for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 1426.263094] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1426.263295] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1426.272564] env[61868]: DEBUG nova.scheduler.client.report [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Updating ProviderTree inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 1426.272791] env[61868]: DEBUG nova.compute.provider_tree [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1426.284780] env[61868]: DEBUG nova.scheduler.client.report [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Refreshing aggregate associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, aggregates: None {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 1426.302309] env[61868]: DEBUG nova.scheduler.client.report [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Refreshing trait associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 1426.419338] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1426.591736] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03122c01-b1bd-4597-ab69-2e17c546cd34 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1426.599668] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012aa478-d278-43f3-9f63-3c2549b16eda {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1426.632456] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf936d7-0924-4006-ac2f-1322e0ed4ea2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1426.640377] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f253301b-f62f-4fe8-895d-162d0a7d577a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1426.654186] env[61868]: DEBUG nova.compute.provider_tree [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1426.663764] env[61868]: DEBUG nova.scheduler.client.report [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1426.681578] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.550s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1426.682198] env[61868]: ERROR nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1426.682198] env[61868]: Faults: ['InvalidArgument']
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Traceback (most recent call last):
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self.driver.spawn(context, instance, image_meta,
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self._fetch_image_if_missing(context, vi)
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] image_cache(vi, tmp_image_ds_loc)
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] vm_util.copy_virtual_disk(
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] session._wait_for_task(vmdk_copy_task)
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] return self.wait_for_task(task_ref)
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] return evt.wait()
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] result = hub.switch()
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] return self.greenlet.switch()
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] self.f(*self.args, **self.kw)
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] raise exceptions.translate_fault(task_info.error)
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Faults: ['InvalidArgument']
[ 1426.682198] env[61868]: ERROR nova.compute.manager [instance: 26f77431-9a5d-444d-b345-10108c34b59b]
[ 1426.683151] env[61868]: DEBUG nova.compute.utils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1426.684766] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Build of instance 26f77431-9a5d-444d-b345-10108c34b59b was re-scheduled: A specified parameter was not correct: fileType
[ 1426.684766] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1426.685129] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1426.685305] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1426.685478] env[61868]: DEBUG nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1426.685696] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 1426.954703] env[61868]: DEBUG nova.network.neutron [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1426.969974] env[61868]: INFO nova.compute.manager [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Took 0.28 seconds to deallocate network for instance.
[ 1427.075351] env[61868]: INFO nova.scheduler.client.report [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted allocations for instance 26f77431-9a5d-444d-b345-10108c34b59b
[ 1427.097606] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5baa18af-c69b-41ca-8a11-082d298d37de tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "26f77431-9a5d-444d-b345-10108c34b59b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 597.624s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1427.099100] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "26f77431-9a5d-444d-b345-10108c34b59b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 399.049s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1427.099474] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "26f77431-9a5d-444d-b345-10108c34b59b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1427.099786] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "26f77431-9a5d-444d-b345-10108c34b59b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1427.100086] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "26f77431-9a5d-444d-b345-10108c34b59b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1427.102643] env[61868]: INFO nova.compute.manager [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Terminating instance
[ 1427.104569] env[61868]: DEBUG nova.compute.manager [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1427.104772] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1427.105333] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52252474-0b17-48db-bb4b-196b271e82c1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.112778] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1427.120030] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddccb438-4370-44b9-9f22-43140be609e9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.149865] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26f77431-9a5d-444d-b345-10108c34b59b could not be found.
[ 1427.150092] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1427.150272] env[61868]: INFO nova.compute.manager [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1427.150517] env[61868]: DEBUG oslo.service.loopingcall [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1427.150768] env[61868]: DEBUG nova.compute.manager [-] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1427.150881] env[61868]: DEBUG nova.network.neutron [-] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 1427.168073] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1427.168402] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1427.170190] env[61868]: INFO nova.compute.claims [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1427.181917] env[61868]: DEBUG nova.network.neutron [-] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1427.192719] env[61868]: INFO nova.compute.manager [-] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] Took 0.04 seconds to deallocate network for instance.
[ 1427.335290] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c17f2459-b16b-4ca0-a609-e12b6d603cf6 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "26f77431-9a5d-444d-b345-10108c34b59b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.236s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1427.337020] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "26f77431-9a5d-444d-b345-10108c34b59b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 277.676s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1427.337176] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 26f77431-9a5d-444d-b345-10108c34b59b] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1427.337354] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "26f77431-9a5d-444d-b345-10108c34b59b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1427.528847] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb7a2a9-695f-4174-8f35-c677e8ac411f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.536982] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2ae3f9-62b4-42a8-bd9e-cb1dce5bd22f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.569165] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825a1f3b-9f60-4bc9-8c32-094a857ad069 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.577419] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15248c21-f63b-45db-8122-74c02afec84b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.591291] env[61868]: DEBUG nova.compute.provider_tree [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1427.600118] env[61868]: DEBUG nova.scheduler.client.report [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1427.618528] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.450s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1427.619137] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1427.658029] env[61868]: DEBUG nova.compute.utils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1427.660034] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1427.660196] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1427.671685] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1427.714580] env[61868]: DEBUG nova.policy [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f43bbfc89e854ba5ae03f03e1a98c155', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39fd476d0f164b4695fe920d42018521', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1427.751476] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1427.774450] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1427.774692] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1427.774987] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1427.775196] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1427.775340] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1427.775484] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1427.775688] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1427.775843] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1427.776012] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1427.776173] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1427.776340] env[61868]: DEBUG nova.virt.hardware [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1427.777192] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1d3ed9-bf4f-4144-a77e-d57664cfe9a2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.785969] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0505566-422a-40ec-95a5-2e08a4f6572f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1428.052266] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Successfully created port: c470b894-7a78-4581-a4a4-b964432c328b {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1428.606095] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Successfully updated port: c470b894-7a78-4581-a4a4-b964432c328b {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1428.615599] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "refresh_cache-d6f7828e-6617-40ca-9f6c-e3a72c328dc9" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 1428.615763] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired lock "refresh_cache-d6f7828e-6617-40ca-9f6c-e3a72c328dc9" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 1428.616024] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}}
[ 1428.663262] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}}
[ 1428.818858] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Updating instance_info_cache with network_info: [{"id": "c470b894-7a78-4581-a4a4-b964432c328b", "address": "fa:16:3e:98:54:d8", "network": {"id": "b71fb302-0271-4727-ba20-75991d2de70e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1625994717-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "39fd476d0f164b4695fe920d42018521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470b894-7a", "ovs_interfaceid": "c470b894-7a78-4581-a4a4-b964432c328b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1428.835678] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Releasing lock "refresh_cache-d6f7828e-6617-40ca-9f6c-e3a72c328dc9" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 1428.836037] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Instance network_info: |[{"id": "c470b894-7a78-4581-a4a4-b964432c328b", "address": "fa:16:3e:98:54:d8", "network": {"id": "b71fb302-0271-4727-ba20-75991d2de70e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1625994717-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "39fd476d0f164b4695fe920d42018521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470b894-7a", "ovs_interfaceid": "c470b894-7a78-4581-a4a4-b964432c328b", "qbh_params": null, "qbg_params": null, "active": true,
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1428.836587] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:54:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c470b894-7a78-4581-a4a4-b964432c328b', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1428.844615] env[61868]: DEBUG oslo.service.loopingcall [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1428.845623] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1428.845892] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b245711-db6b-422d-a3b8-90e8b242f18a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.869662] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1428.869662] env[61868]: value = "task-41115" [ 1428.869662] env[61868]: _type = "Task" [ 1428.869662] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.879104] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41115, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.992593] env[61868]: DEBUG nova.compute.manager [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Received event network-vif-plugged-c470b894-7a78-4581-a4a4-b964432c328b {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1428.992970] env[61868]: DEBUG oslo_concurrency.lockutils [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] Acquiring lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1428.993347] env[61868]: DEBUG oslo_concurrency.lockutils [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1428.993625] env[61868]: DEBUG oslo_concurrency.lockutils [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1428.993901] env[61868]: DEBUG nova.compute.manager [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] No waiting events found dispatching network-vif-plugged-c470b894-7a78-4581-a4a4-b964432c328b {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1428.994205] env[61868]: WARNING nova.compute.manager [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Received unexpected event network-vif-plugged-c470b894-7a78-4581-a4a4-b964432c328b for instance with vm_state building and task_state spawning. [ 1428.994495] env[61868]: DEBUG nova.compute.manager [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Received event network-changed-c470b894-7a78-4581-a4a4-b964432c328b {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1428.994760] env[61868]: DEBUG nova.compute.manager [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Refreshing instance network info cache due to event network-changed-c470b894-7a78-4581-a4a4-b964432c328b. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1428.995045] env[61868]: DEBUG oslo_concurrency.lockutils [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] Acquiring lock "refresh_cache-d6f7828e-6617-40ca-9f6c-e3a72c328dc9" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1428.995286] env[61868]: DEBUG oslo_concurrency.lockutils [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] Acquired lock "refresh_cache-d6f7828e-6617-40ca-9f6c-e3a72c328dc9" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1428.995541] env[61868]: DEBUG nova.network.neutron [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Refreshing network info cache for port c470b894-7a78-4581-a4a4-b964432c328b {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1429.252061] env[61868]: DEBUG nova.network.neutron [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Updated VIF entry in instance network info cache for port c470b894-7a78-4581-a4a4-b964432c328b. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1429.252651] env[61868]: DEBUG nova.network.neutron [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Updating instance_info_cache with network_info: [{"id": "c470b894-7a78-4581-a4a4-b964432c328b", "address": "fa:16:3e:98:54:d8", "network": {"id": "b71fb302-0271-4727-ba20-75991d2de70e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1625994717-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "39fd476d0f164b4695fe920d42018521", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc470b894-7a", "ovs_interfaceid": "c470b894-7a78-4581-a4a4-b964432c328b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.265015] env[61868]: DEBUG oslo_concurrency.lockutils [req-5e0dd1f0-0566-471d-a3ea-ee349e3f77c4 req-f7f1a6e3-d059-45ec-ae38-2518948b6593 service nova] Releasing lock "refresh_cache-d6f7828e-6617-40ca-9f6c-e3a72c328dc9" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1429.381554] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41115, 'name': CreateVM_Task, 'duration_secs': 0.316505} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.381887] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1429.382545] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1429.382793] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1429.385620] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d7a3ea-5c8f-4cb2-959c-4fd07f8ee032 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.419309] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Reconfiguring VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1429.419673] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-727ebff1-51fa-432b-9039-dbeea6a115ed {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.437138] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1429.437138] env[61868]: value = "task-41116" [ 1429.437138] env[61868]: _type = "Task" [ 1429.437138] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.448610] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41116, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.947442] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41116, 'name': ReconfigVM_Task, 'duration_secs': 0.111987} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.947813] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Reconfigured VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1429.947926] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1429.948275] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1429.948440] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1429.948756] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1429.949017] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a796f12-8b62-4e64-b38b-f814d0ea3213 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.954013] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1429.954013] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a46a9d-3621-4ae1-ba38-637b00b5dec3" [ 1429.954013] env[61868]: _type = "Task" [ 1429.954013] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.962072] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a46a9d-3621-4ae1-ba38-637b00b5dec3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.465006] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1430.465268] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1430.465485] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1431.351223] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.351678] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1431.351678] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1431.373515] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.373712] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.373824] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.373950] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.374076] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.374196] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.374317] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.374435] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.374553] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.374667] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1431.374783] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1431.375373] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.375516] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61868) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1431.691386] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1433.359546] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.346341] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.351403] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.351197] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.351620] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.363389] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1436.363719] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1436.363966] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1436.364192] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1436.365421] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5fcd48-a82b-4884-b617-ffb3d4a5c16f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.376397] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e3e0ea-4177-4543-a42f-b1a6ab12a839 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.392830] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091def36-0e2d-4d82-998a-fe516a1cc5f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.401042] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca599bd-507f-4de6-ab3b-aede28a5d6b1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.431786] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181901MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1436.432102] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1436.432187] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1436.501318] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a8e7708c-b9ee-465b-8df8-798983c6f06c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.501559] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.501728] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.501933] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.502093] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.502260] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.502424] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.502586] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.502748] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.502909] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1436.515274] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 185855b4-f378-45b6-9603-081b3c1b2c71 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.526810] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7c578516-d2b7-4b5e-aaac-5831d2262c44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.539452] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 189f5d4f-7a0e-4d49-a0c9-04e886b35383 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.551037] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8e01cf72-2e56-493d-8723-2e51398a7697 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.561818] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.572783] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.586011] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 352b5eae-b809-4cb5-8970-ebad9fba78bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.597547] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 11a56664-d4bc-4090-8de0-52cfeb8f37e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.631421] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.644553] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 51d7ab97-1a1c-49e6-a989-cb1b49e30d2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.657095] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f92ea472-0354-45a7-a29a-569faaf460f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1436.657384] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1436.657489] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1436.966225] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6f37bb-d0ee-4b0c-8149-d18d70d04e75 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.974513] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea928b1f-ed04-47e2-9d30-587a5006a947 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.006957] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d226fc-b134-44d9-a3a9-a238070808a4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.015511] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194a4882-32b4-45c9-97eb-74f1e3c6709f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.030366] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1437.040558] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1437.059633] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1437.060009] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.628s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1441.060424] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.351255] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.351468] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1443.609623] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "b1473dd0-5427-496c-a94c-5772635b229f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1443.609979] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "b1473dd0-5427-496c-a94c-5772635b229f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1443.644476] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "19162a19-9d7c-49b7-ad55-948d2126a61b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1443.644737] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "19162a19-9d7c-49b7-ad55-948d2126a61b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1463.693384] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c04bb216-defb-495c-88ec-335c7ead1a01 tempest-ServerActionsTestJSON-100355986 tempest-ServerActionsTestJSON-100355986-project-member] Acquiring lock "0628d2ec-4e42-4bd2-a819-a6f8e2252469" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1463.693705] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c04bb216-defb-495c-88ec-335c7ead1a01 tempest-ServerActionsTestJSON-100355986 tempest-ServerActionsTestJSON-100355986-project-member] Lock "0628d2ec-4e42-4bd2-a819-a6f8e2252469" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1474.894297] env[61868]: WARNING oslo_vmware.rw_handles [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1474.894297] env[61868]: ERROR oslo_vmware.rw_handles [ 1474.894963] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1474.896598] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1474.896835] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Copying Virtual Disk [datastore2] vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/dbfec10b-5134-47ba-b20e-d75ec9819c3b/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1474.897114] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50ee52a3-2cce-4d23-9a9d-5455bdd63196 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.905283] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Waiting for the 
task: (returnval){ [ 1474.905283] env[61868]: value = "task-41117" [ 1474.905283] env[61868]: _type = "Task" [ 1474.905283] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.914326] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Task: {'id': task-41117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.415234] env[61868]: DEBUG oslo_vmware.exceptions [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1475.415529] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1475.416110] env[61868]: ERROR nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1475.416110] env[61868]: Faults: ['InvalidArgument'] [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Traceback (most recent call last): [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] yield resources [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self.driver.spawn(context, instance, image_meta, [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self._fetch_image_if_missing(context, vi) [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: 
a8e7708c-b9ee-465b-8df8-798983c6f06c] image_cache(vi, tmp_image_ds_loc) [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] vm_util.copy_virtual_disk( [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] session._wait_for_task(vmdk_copy_task) [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] return self.wait_for_task(task_ref) [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] return evt.wait() [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] result = hub.switch() [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] return self.greenlet.switch() [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self.f(*self.args, **self.kw) [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] raise exceptions.translate_fault(task_info.error) [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Faults: ['InvalidArgument'] [ 1475.416110] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] [ 1475.416961] env[61868]: INFO nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Terminating instance [ 1475.418005] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 
tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1475.418211] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1475.418459] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a78a5d3-1b03-40d0-810d-e34a8d022f38 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.420679] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1475.420920] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1475.421668] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d9238f-7504-4316-b6ba-e6695d91102b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.428697] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1475.428914] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f22ac934-dd82-4dad-bc97-5bf629a006cd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.431144] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1475.431309] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1475.432240] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bd26491-5771-4175-b4b8-8eb4791179aa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.436830] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Waiting for the task: (returnval){ [ 1475.436830] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]522fb515-af94-9411-2516-a370dec53ab1" [ 1475.436830] env[61868]: _type = "Task" [ 1475.436830] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.444536] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]522fb515-af94-9411-2516-a370dec53ab1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.503438] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1475.503653] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1475.503826] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Deleting the datastore file [datastore2] a8e7708c-b9ee-465b-8df8-798983c6f06c {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1475.504133] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26d8ab5c-5654-41e6-9b2f-9605761bc69b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.511520] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Waiting for the task: (returnval){ [ 1475.511520] env[61868]: value = "task-41119" [ 1475.511520] env[61868]: _type = "Task" [ 1475.511520] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.519698] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Task: {'id': task-41119, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.947841] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1475.948232] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Creating directory with path [datastore2] vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1475.948278] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7791a6bb-4f26-49c1-9782-8e944de45727 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.959942] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Created directory with path [datastore2] vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1475.960150] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Fetch image to [datastore2] vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1475.960320] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1475.961140] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b98c8d-74d9-4537-890b-d4d67e3de257 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.968180] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b67df19-7e4a-4d99-bb97-502a309bec08 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.978538] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708f5bc9-24c6-4a94-8d32-0aac23c5036f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.011895] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-57608734-2d57-43b1-bb12-af26fc7a32a8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.024111] env[61868]: DEBUG oslo_vmware.api [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Task: {'id': task-41119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078948} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.024692] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1476.024891] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1476.025062] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1476.025237] env[61868]: INFO nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Took 0.60 seconds to destroy the instance on the hypervisor. 
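The CopyVirtualDisk_Task, DeleteDatastoreFile_Task, and SearchDatastore_Task entries above all follow the same oslo.vmware pattern: submit a vCenter task, then poll it until it reports success or error (wait_for_task / _poll_task in oslo_vmware/api.py, visible in the "Waiting for the task ... to complete", "progress is N%", and "completed successfully" lines). A minimal sketch of that loop, assuming a hypothetical poll_fn stand-in for the real PropertyCollector round trip; this is an illustration, not the oslo.vmware source:

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str              # 'running' | 'success' | 'error'
    progress: int = 0
    error: str | None = None

class VimFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(poll_fn, interval=0.5):
    # Each pass corresponds to one "Task: {...} progress is N%." line;
    # success yields the "completed successfully" entry, and an error
    # state is translated into an exception, as with "A specified
    # parameter was not correct: fileType" above.
    while True:
        info = poll_fn()    # returns a TaskInfo
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise VimFault(info.error)
        time.sleep(interval)

The real implementation drives this loop from an eventlet looping call (visible in the tracebacks as oslo_vmware/common/loopingcall.py) and maps the task fault through exceptions.translate_fault, but the control flow is the same.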
[ 1476.026811] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cdd63d13-7e08-48f0-8128-29e7ca49fbad {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.028831] env[61868]: DEBUG nova.compute.claims [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1476.029004] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1476.029218] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1476.052056] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1476.107955] env[61868]: DEBUG oslo_vmware.rw_handles [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1476.168204] env[61868]: DEBUG oslo_vmware.rw_handles [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1476.168430] env[61868]: DEBUG oslo_vmware.rw_handles [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1476.392855] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389b981c-265e-4408-bf98-6950f07036c0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.400595] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d992d51a-f8d4-4c43-8458-a93edb6e76b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.434798] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5005c84-f6f7-40b4-8b00-c92fc5d31b2d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.443513] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4907ce-fbe3-4b84-bef8-d6e9a8dd2fcb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.458444] env[61868]: DEBUG nova.compute.provider_tree [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1476.467361] env[61868]: DEBUG nova.scheduler.client.report [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1476.484235] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.455s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1476.484796] env[61868]: ERROR nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1476.484796] env[61868]: Faults: ['InvalidArgument'] [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Traceback (most recent call last): [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1476.484796] env[61868]: 
ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self.driver.spawn(context, instance, image_meta, [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self._fetch_image_if_missing(context, vi) [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] image_cache(vi, tmp_image_ds_loc) [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] vm_util.copy_virtual_disk( [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] session._wait_for_task(vmdk_copy_task) [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] return self.wait_for_task(task_ref) [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] return evt.wait() [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] result = hub.switch() [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] return self.greenlet.switch() [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] self.f(*self.args, **self.kw) [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] raise exceptions.translate_fault(task_info.error) [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Faults: ['InvalidArgument'] [ 1476.484796] env[61868]: ERROR nova.compute.manager [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] [ 1476.485680] env[61868]: DEBUG nova.compute.utils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1476.486917] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Build of instance a8e7708c-b9ee-465b-8df8-798983c6f06c was re-scheduled: A specified parameter was not correct: fileType [ 1476.486917] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1476.487392] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1476.487568] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1476.487739] env[61868]: DEBUG nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1476.487905] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1476.748417] env[61868]: DEBUG nova.network.neutron [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.761630] env[61868]: INFO nova.compute.manager [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Took 0.27 seconds to deallocate network for instance. [ 1476.857157] env[61868]: INFO nova.scheduler.client.report [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Deleted allocations for instance a8e7708c-b9ee-465b-8df8-798983c6f06c [ 1476.879303] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c6fa9317-8d61-4b3a-858d-58f325837a54 tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 538.451s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1476.880609] env[61868]: DEBUG oslo_concurrency.lockutils [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 341.918s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1476.880834] env[61868]: DEBUG oslo_concurrency.lockutils [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Acquiring lock "a8e7708c-b9ee-465b-8df8-798983c6f06c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1476.881101] env[61868]: DEBUG oslo_concurrency.lockutils [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1476.881274] env[61868]: DEBUG oslo_concurrency.lockutils [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1476.883535] env[61868]: INFO nova.compute.manager [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Terminating instance [ 1476.886308] env[61868]: DEBUG nova.compute.manager [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1476.888149] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1476.888149] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44ee1761-bc63-4cba-9e83-ba2ec70fb82e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.893346] env[61868]: DEBUG nova.compute.manager [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: 185855b4-f378-45b6-9603-081b3c1b2c71] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1476.901478] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e2a350-951e-459c-ac71-aca96407402c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.925174] env[61868]: DEBUG nova.compute.manager [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: 185855b4-f378-45b6-9603-081b3c1b2c71] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1476.935113] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a8e7708c-b9ee-465b-8df8-798983c6f06c could not be found. 
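The terminate path above is deliberately tolerant: the earlier destroy already unregistered the VM and deleted its datastore files, so when the second terminate runs, the SearchIndex.FindAllByUuid lookup comes back empty and vmops logs InstanceNotFound as a warning instead of failing the delete. A minimal sketch of that best-effort destroy, using local stand-ins for the driver pieces (InstanceNotFound and backend_destroy here are placeholders, not the real nova.exception class or vmops internals):

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Placeholder for nova.exception.InstanceNotFound."""

def destroy(instance_uuid, backend_destroy):
    """Best-effort teardown: a missing backend VM is not an error."""
    try:
        # In the log: UnregisterVM followed by DeleteDatastoreFile_Task
        # (task-41119) on the first pass.
        backend_destroy(instance_uuid)
    except InstanceNotFound:
        # Second pass: "Instance does not exist on backend" warning.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    # Both paths end with the "Instance destroyed" entry seen above.
    LOG.debug("Instance destroyed")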
[ 1476.935334] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1476.935520] env[61868]: INFO nova.compute.manager [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1476.935768] env[61868]: DEBUG oslo.service.loopingcall [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.936276] env[61868]: DEBUG nova.compute.manager [-] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1476.936479] env[61868]: DEBUG nova.network.neutron [-] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1476.962485] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "185855b4-f378-45b6-9603-081b3c1b2c71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.194s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1476.967602] env[61868]: DEBUG nova.network.neutron [-] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.978694] env[61868]: INFO nova.compute.manager [-] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] Took 0.04 seconds to deallocate network for instance. [ 1476.980973] env[61868]: DEBUG nova.compute.manager [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: 7c578516-d2b7-4b5e-aaac-5831d2262c44] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1477.007756] env[61868]: DEBUG nova.compute.manager [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: 7c578516-d2b7-4b5e-aaac-5831d2262c44] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1477.030595] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b71e3bd4-c371-4b7e-8d04-39b99dda5084 tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "7c578516-d2b7-4b5e-aaac-5831d2262c44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.220s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1477.043648] env[61868]: DEBUG nova.compute.manager [None req-2bc4129f-81b5-4f22-9c18-0d3578b99dc8 tempest-ServersTestManualDisk-772162140 tempest-ServersTestManualDisk-772162140-project-member] [instance: 189f5d4f-7a0e-4d49-a0c9-04e886b35383] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1477.069222] env[61868]: DEBUG nova.compute.manager [None req-2bc4129f-81b5-4f22-9c18-0d3578b99dc8 tempest-ServersTestManualDisk-772162140 tempest-ServersTestManualDisk-772162140-project-member] [instance: 189f5d4f-7a0e-4d49-a0c9-04e886b35383] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1477.074617] env[61868]: DEBUG oslo_concurrency.lockutils [None req-738a7270-3c2b-42d8-99cb-c4bad0dc21ea tempest-ServerActionsTestOtherB-280984654 tempest-ServerActionsTestOtherB-280984654-project-member] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1477.075908] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 327.415s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1477.076218] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a8e7708c-b9ee-465b-8df8-798983c6f06c] During sync_power_state the instance has a pending task (deleting). Skip. [ 1477.076437] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "a8e7708c-b9ee-465b-8df8-798983c6f06c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1477.094619] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2bc4129f-81b5-4f22-9c18-0d3578b99dc8 tempest-ServersTestManualDisk-772162140 tempest-ServersTestManualDisk-772162140-project-member] Lock "189f5d4f-7a0e-4d49-a0c9-04e886b35383" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.376s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1477.106962] env[61868]: DEBUG nova.compute.manager [None req-89d4175e-d337-4144-9493-2ae0536dbaa0 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] [instance: 8e01cf72-2e56-493d-8723-2e51398a7697] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1477.133764] env[61868]: DEBUG nova.compute.manager [None req-89d4175e-d337-4144-9493-2ae0536dbaa0 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] [instance: 8e01cf72-2e56-493d-8723-2e51398a7697] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1477.160943] env[61868]: DEBUG oslo_concurrency.lockutils [None req-89d4175e-d337-4144-9493-2ae0536dbaa0 tempest-AttachVolumeTestJSON-2111320188 tempest-AttachVolumeTestJSON-2111320188-project-member] Lock "8e01cf72-2e56-493d-8723-2e51398a7697" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.904s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1477.173953] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1477.243439] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1477.243704] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1477.245773] env[61868]: INFO nova.compute.claims [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1477.584393] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25957c3c-6e13-478e-923a-910a4944ad92 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.593321] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c0b398-19f3-498f-bd71-aee9b60b3266 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.624891] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9583b451-9720-4987-9f64-201200dfd5c6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.632989] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084a8055-c006-4512-b4f4-26c8595914f6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1477.647008] env[61868]: DEBUG nova.compute.provider_tree [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.656235] env[61868]: DEBUG nova.scheduler.client.report [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1477.676913] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.433s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1477.677403] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1477.711357] env[61868]: DEBUG nova.compute.utils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1477.712630] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1477.712799] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1477.723689] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1477.767696] env[61868]: DEBUG nova.policy [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '424b508614194ac2ad15e8cb62f2d041', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f518980782c4dc5ac6efe31af19af16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1477.800194] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1477.824366] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1477.824629] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1477.824788] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1477.825032] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1477.825215] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1477.825366] env[61868]: DEBUG nova.virt.hardware [None 
req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1477.825591] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1477.825753] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1477.825927] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1477.826088] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1477.826261] env[61868]: DEBUG nova.virt.hardware [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1477.827150] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1a5c89-8cd4-4d1b-a8da-8f8a4356342b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.835861] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293d6c17-0f12-4746-b5dc-c3aa627f7d40 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.044283] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Successfully created port: cf9bf805-41cf-448d-a0f9-2b1a47751b22 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1478.574591] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Successfully updated port: cf9bf805-41cf-448d-a0f9-2b1a47751b22 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1478.590509] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "refresh_cache-95ba0df5-846c-4e5d-94e6-fd9c43dcc191" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1478.590509] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "refresh_cache-95ba0df5-846c-4e5d-94e6-fd9c43dcc191" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1478.590509] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1478.638554] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1478.787788] env[61868]: DEBUG nova.compute.manager [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Received event network-vif-plugged-cf9bf805-41cf-448d-a0f9-2b1a47751b22 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1478.788147] env[61868]: DEBUG oslo_concurrency.lockutils [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] Acquiring lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1478.788684] env[61868]: DEBUG oslo_concurrency.lockutils [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1478.789096] env[61868]: DEBUG oslo_concurrency.lockutils [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1478.789267] env[61868]: DEBUG nova.compute.manager [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] No waiting events found dispatching network-vif-plugged-cf9bf805-41cf-448d-a0f9-2b1a47751b22 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1478.789452] env[61868]: WARNING nova.compute.manager [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Received unexpected event 
network-vif-plugged-cf9bf805-41cf-448d-a0f9-2b1a47751b22 for instance with vm_state building and task_state spawning. [ 1478.789614] env[61868]: DEBUG nova.compute.manager [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Received event network-changed-cf9bf805-41cf-448d-a0f9-2b1a47751b22 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1478.789769] env[61868]: DEBUG nova.compute.manager [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Refreshing instance network info cache due to event network-changed-cf9bf805-41cf-448d-a0f9-2b1a47751b22. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1478.789940] env[61868]: DEBUG oslo_concurrency.lockutils [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] Acquiring lock "refresh_cache-95ba0df5-846c-4e5d-94e6-fd9c43dcc191" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1478.802467] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Updating instance_info_cache with network_info: [{"id": "cf9bf805-41cf-448d-a0f9-2b1a47751b22", "address": "fa:16:3e:25:bb:98", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf9bf805-41", "ovs_interfaceid": "cf9bf805-41cf-448d-a0f9-2b1a47751b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.819363] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "refresh_cache-95ba0df5-846c-4e5d-94e6-fd9c43dcc191" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1478.819690] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Instance network_info: |[{"id": "cf9bf805-41cf-448d-a0f9-2b1a47751b22", "address": "fa:16:3e:25:bb:98", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": 
"tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf9bf805-41", "ovs_interfaceid": "cf9bf805-41cf-448d-a0f9-2b1a47751b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1478.820031] env[61868]: DEBUG oslo_concurrency.lockutils [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] Acquired lock "refresh_cache-95ba0df5-846c-4e5d-94e6-fd9c43dcc191" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1478.820222] env[61868]: DEBUG nova.network.neutron [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Refreshing network info cache for port cf9bf805-41cf-448d-a0f9-2b1a47751b22 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1478.821358] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:bb:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf9bf805-41cf-448d-a0f9-2b1a47751b22', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.828801] env[61868]: DEBUG oslo.service.loopingcall [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.830045] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1478.832399] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7c82e22-138e-475f-8fbb-eb1d43f51bfb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.853701] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.853701] env[61868]: value = "task-41120" [ 1478.853701] env[61868]: _type = "Task" [ 1478.853701] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.865398] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41120, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.292678] env[61868]: DEBUG nova.network.neutron [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Updated VIF entry in instance network info cache for port cf9bf805-41cf-448d-a0f9-2b1a47751b22. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1479.293268] env[61868]: DEBUG nova.network.neutron [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Updating instance_info_cache with network_info: [{"id": "cf9bf805-41cf-448d-a0f9-2b1a47751b22", "address": "fa:16:3e:25:bb:98", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf9bf805-41", "ovs_interfaceid": "cf9bf805-41cf-448d-a0f9-2b1a47751b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.303238] env[61868]: DEBUG oslo_concurrency.lockutils [req-751f1a9b-3c7f-44e5-9a84-9fc9d5aba2d4 req-865a3f03-7206-4c61-9674-11d69b2eaf79 service nova] Releasing lock "refresh_cache-95ba0df5-846c-4e5d-94e6-fd9c43dcc191" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1479.364377] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41120, 'name': CreateVM_Task, 'duration_secs': 0.298979} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.364558] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1479.365090] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1479.365327] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1479.368233] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7652f5bd-0cd7-4f6f-aa0c-aba914540629 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.401635] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Reconfiguring VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1479.402061] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4632d606-18b5-4378-a0ff-e12ab972c957 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.418684] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1479.418684] env[61868]: value = "task-41121" [ 1479.418684] env[61868]: _type = "Task" [ 1479.418684] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.428566] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41121, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.928981] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41121, 'name': ReconfigVM_Task, 'duration_secs': 0.106281} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.929354] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Reconfigured VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1479.929634] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.564s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1479.929948] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1479.930158] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1479.930534] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1479.930895] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee5cebf7-61c2-4462-9123-abd9668940e2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.936441] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1479.936441] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5272e9d1-47e2-cbe9-2fc9-ea098004506f" [ 1479.936441] env[61868]: _type = "Task" [ 1479.936441] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.945328] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5272e9d1-47e2-cbe9-2fc9-ea098004506f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.447324] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1480.447572] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1480.447676] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1480.483725] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ad019fbd-f373-4808-9c8d-14078d5f26a2 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "f85e2a53-adea-4581-b453-2b96af0ebc70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1480.483945] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ad019fbd-f373-4808-9c8d-14078d5f26a2 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f85e2a53-adea-4581-b453-2b96af0ebc70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1487.351681] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.806863] env[61868]: DEBUG oslo_concurrency.lockutils [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1492.352956] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1492.352956] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 
1492.352956] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1492.373800] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.373974] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374100] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374233] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374362] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374484] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374607] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374730] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374853] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.374974] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1492.375097] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1494.371043] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.351553] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.351792] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.347478] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.375901] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.397191] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1496.397422] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1496.397584] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1496.397733] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1496.398988] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e46bd6d-1ef3-4ff2-997b-34541ac2a703 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.410630] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3c2369-eb48-4d92-9f06-18ae7a426566 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.426614] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62960c35-5ad9-4b08-baf1-1136b4b71d70 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.443882] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d5b58e-edff-4b0c-9c77-107993b961d2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.481723] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181933MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1496.481885] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1496.482111] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1496.560162] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.560538] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.560931] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.561194] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.561428] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.561646] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.561895] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.562136] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.562495] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.562713] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.577994] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.590638] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 352b5eae-b809-4cb5-8970-ebad9fba78bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.612596] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 11a56664-d4bc-4090-8de0-52cfeb8f37e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.627033] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.642258] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 51d7ab97-1a1c-49e6-a989-cb1b49e30d2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.658208] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f92ea472-0354-45a7-a29a-569faaf460f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.678743] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.692161] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 19162a19-9d7c-49b7-ad55-948d2126a61b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.704935] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0628d2ec-4e42-4bd2-a819-a6f8e2252469 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.717621] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f85e2a53-adea-4581-b453-2b96af0ebc70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.717916] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1496.718027] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1497.016302] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2d1b09-3efd-4d1c-a3d7-9b98c04b85fb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.024752] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e240239-c4aa-4403-b98d-52921c18bc54 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.057107] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a32ff21-3207-4a44-9b9c-3f90dfe431b6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.065665] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabc4afb-5741-4f72-9b6c-f2d063ee27b5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.082243] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1497.091759] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1497.113087] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1497.113493] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.631s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1499.089467] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.351226] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.351530] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.351613] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1525.024092] env[61868]: WARNING oslo_vmware.rw_handles [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1525.024092] env[61868]: ERROR oslo_vmware.rw_handles [ 1525.025043] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data 
store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1525.026497] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1525.026732] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Copying Virtual Disk [datastore2] vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/1eb9c5cf-7d43-42d8-bf2b-baca6ca3f5fa/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1525.027167] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-539f79d4-b4e6-4f70-b171-bc84fafd7fad {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.037601] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Waiting for the task: (returnval){ [ 1525.037601] env[61868]: value = "task-41122" [ 1525.037601] env[61868]: _type = "Task" [ 1525.037601] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.047070] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Task: {'id': task-41122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.548033] env[61868]: DEBUG oslo_vmware.exceptions [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1525.548343] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1525.548912] env[61868]: ERROR nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1525.548912] env[61868]: Faults: ['InvalidArgument'] [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Traceback (most recent call last): [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] yield resources [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self.driver.spawn(context, instance, image_meta, [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self._fetch_image_if_missing(context, vi) [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] image_cache(vi, tmp_image_ds_loc) [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] vm_util.copy_virtual_disk( [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] session._wait_for_task(vmdk_copy_task) [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] return self.wait_for_task(task_ref) [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] return evt.wait() [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] result = hub.switch() [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] return self.greenlet.switch() [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self.f(*self.args, **self.kw) [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] raise exceptions.translate_fault(task_info.error) [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Faults: ['InvalidArgument'] [ 1525.548912] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] [ 1525.550039] env[61868]: INFO nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Terminating instance [ 1525.550782] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1525.551014] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1525.551254] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0cbb8764-dbcc-447c-b9d7-dddc46824008 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.553666] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1525.553863] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1525.554606] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2c8539-a1d0-4ae7-901e-ad53017cd592 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.564253] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1525.565355] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fefcc54d-5858-4cfe-a085-5fb4442ac748 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.566922] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1525.567140] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1525.567894] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cec0d71c-68ab-4f53-93f5-56d6718f58b7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.574266] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Waiting for the task: (returnval){ [ 1525.574266] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52e1f5ec-18cb-02fe-ec5e-26557a3b4729" [ 1525.574266] env[61868]: _type = "Task" [ 1525.574266] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.582240] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52e1f5ec-18cb-02fe-ec5e-26557a3b4729, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.638624] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1525.638954] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1525.639447] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Deleting the datastore file [datastore2] a7371133-1ff3-4016-84fc-a59a9ef6d445 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1525.639614] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7235adf-efb3-4a94-8b6e-c7c18d689dab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.648041] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Waiting for the task: (returnval){ [ 1525.648041] env[61868]: value = "task-41124" [ 1525.648041] env[61868]: _type = "Task" [ 1525.648041] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.656716] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Task: {'id': task-41124, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.085397] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1526.085777] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Creating directory with path [datastore2] vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.086030] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8634433c-e50d-43ec-98b6-7ef988f1732c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.100441] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Created directory with path [datastore2] vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.100687] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Fetch image to [datastore2] vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1526.100941] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1526.101801] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c486c2c-7031-41b1-b8c9-daa1e2eb390a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.108949] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cfa75c-6d99-4696-8796-6644ed01ce27 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.118689] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50acfcbc-fbb4-42cf-b99e-c8eba1d7f4d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.158560] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-109eb3f7-059f-4dd5-b065-25754646f5f7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.168260] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bd392b4c-dcb6-4408-b76b-8b4587861fcf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.169982] env[61868]: DEBUG oslo_vmware.api [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Task: {'id': task-41124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102102} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.170288] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1526.170503] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1526.170714] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1526.170953] env[61868]: INFO nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1526.178578] env[61868]: DEBUG nova.compute.claims [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1526.178578] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1526.178578] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1526.194362] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1526.252165] env[61868]: DEBUG oslo_vmware.rw_handles [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1526.310558] env[61868]: DEBUG oslo_vmware.rw_handles [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1526.310842] env[61868]: DEBUG oslo_vmware.rw_handles [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1526.545841] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb1c1eb-1d79-4574-b0c5-a908c0e96240 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.555563] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291a028a-d673-461f-937e-79eb776ad535 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.585900] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a965f02-b48b-4c07-becc-333e9bfac494 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.593637] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3f0b0c-0770-419f-879d-a009ae592525 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.607383] env[61868]: DEBUG nova.compute.provider_tree [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.617976] env[61868]: DEBUG nova.scheduler.client.report [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1526.634181] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.456s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1526.634674] env[61868]: ERROR nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1526.634674] env[61868]: Faults: ['InvalidArgument'] [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Traceback (most recent call last): [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self.driver.spawn(context, instance, image_meta, [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self._fetch_image_if_missing(context, vi) [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] image_cache(vi, tmp_image_ds_loc) [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] vm_util.copy_virtual_disk( [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] session._wait_for_task(vmdk_copy_task) [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] return self.wait_for_task(task_ref) [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] return evt.wait() [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] result = hub.switch() [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] return self.greenlet.switch() [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] self.f(*self.args, **self.kw) [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: 
a7371133-1ff3-4016-84fc-a59a9ef6d445] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] raise exceptions.translate_fault(task_info.error) [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Faults: ['InvalidArgument'] [ 1526.634674] env[61868]: ERROR nova.compute.manager [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] [ 1526.635648] env[61868]: DEBUG nova.compute.utils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1526.637115] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Build of instance a7371133-1ff3-4016-84fc-a59a9ef6d445 was re-scheduled: A specified parameter was not correct: fileType [ 1526.637115] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1526.637492] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1526.637665] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1526.637828] env[61868]: DEBUG nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1526.637987] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1526.904034] env[61868]: DEBUG nova.network.neutron [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.917234] env[61868]: INFO nova.compute.manager [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Took 0.28 seconds to deallocate network for instance. [ 1527.028097] env[61868]: INFO nova.scheduler.client.report [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Deleted allocations for instance a7371133-1ff3-4016-84fc-a59a9ef6d445 [ 1527.047960] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56c681d0-adc0-46c1-88a4-803b466c34e0 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 543.118s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1527.049192] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 377.388s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1527.049378] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1527.049544] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1527.051048] env[61868]: DEBUG oslo_concurrency.lockutils [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 347.332s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1527.051294] env[61868]: DEBUG oslo_concurrency.lockutils [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Acquiring lock "a7371133-1ff3-4016-84fc-a59a9ef6d445-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1527.051559] env[61868]: DEBUG oslo_concurrency.lockutils [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1527.051697] env[61868]: DEBUG oslo_concurrency.lockutils [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1527.053954] env[61868]: INFO nova.compute.manager [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Terminating instance [ 1527.056222] env[61868]: DEBUG nova.compute.manager [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1527.056511] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1527.056850] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-161107df-ded7-4863-b1f7-17a80cfbccfc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.062149] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1527.068848] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fa8abd-da65-4f66-929f-b8750f5a1077 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.099705] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a7371133-1ff3-4016-84fc-a59a9ef6d445 could not be found. [ 1527.100064] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1527.100126] env[61868]: INFO nova.compute.manager [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1527.100397] env[61868]: DEBUG oslo.service.loopingcall [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.101262] env[61868]: DEBUG nova.compute.manager [-] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1527.101366] env[61868]: DEBUG nova.network.neutron [-] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1527.119590] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1527.119871] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1527.121597] env[61868]: INFO nova.compute.claims [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1527.139667] env[61868]: DEBUG nova.network.neutron [-] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.156735] env[61868]: INFO nova.compute.manager [-] [instance: a7371133-1ff3-4016-84fc-a59a9ef6d445] Took 0.06 seconds to deallocate network for instance. 
[ 1527.308206] env[61868]: DEBUG oslo_concurrency.lockutils [None req-628441ca-9aea-47b8-b2f6-7ca0884b0a10 tempest-InstanceActionsV221TestJSON-1625642940 tempest-InstanceActionsV221TestJSON-1625642940-project-member] Lock "a7371133-1ff3-4016-84fc-a59a9ef6d445" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.257s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1527.459884] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7e646f-9b97-46b7-8730-0ab738c761e0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.468658] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6838a2-0759-4807-90bb-2f4e2cb582f1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.498312] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e048a3-327a-46c8-acdd-4fdb0d5bc8d4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.506810] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673e99a4-b263-4ca0-bf06-ec8dd8c0c5d7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.530188] env[61868]: DEBUG nova.compute.provider_tree [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.541991] env[61868]: DEBUG nova.scheduler.client.report [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1527.563422] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.443s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1527.563914] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1527.596954] env[61868]: DEBUG nova.compute.utils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1527.599113] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1527.599390] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1527.608711] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1527.674771] env[61868]: DEBUG nova.policy [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '00e72af3b9f44d1fb85175d102514099', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20b6021bf3ad4926b12172b01abb66df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1527.688694] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1527.711122] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1527.711254] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1527.711370] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1527.711550] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1527.711696] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1527.711838] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1527.712062] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1527.712237] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1527.712407] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1527.712569] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1527.712743] env[61868]: DEBUG nova.virt.hardware [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1527.713620] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f00f1d0-70de-4412-91d6-914cbcf1fdd5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.722467] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f724344f-365e-4e85-8869-7c70d46ce1af {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.099573] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Successfully created port: dc289594-c992-4a94-b2b5-70c35f8bc2bb {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1529.120553] env[61868]: DEBUG nova.compute.manager [req-944b5f88-ceed-4fea-b897-0b7103b7359b req-e2136d10-f278-49c9-81bc-976cd99f64e2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Received event network-vif-plugged-dc289594-c992-4a94-b2b5-70c35f8bc2bb {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1529.120817] env[61868]: DEBUG oslo_concurrency.lockutils [req-944b5f88-ceed-4fea-b897-0b7103b7359b req-e2136d10-f278-49c9-81bc-976cd99f64e2 service nova] Acquiring lock "d69cb3f5-b385-432a-b562-87d0b1b0877b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1529.121021] env[61868]: DEBUG oslo_concurrency.lockutils [req-944b5f88-ceed-4fea-b897-0b7103b7359b req-e2136d10-f278-49c9-81bc-976cd99f64e2 service nova] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1529.121195] env[61868]: DEBUG oslo_concurrency.lockutils [req-944b5f88-ceed-4fea-b897-0b7103b7359b req-e2136d10-f278-49c9-81bc-976cd99f64e2 service nova] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1529.121361] env[61868]: DEBUG nova.compute.manager [req-944b5f88-ceed-4fea-b897-0b7103b7359b req-e2136d10-f278-49c9-81bc-976cd99f64e2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] No waiting events found dispatching network-vif-plugged-dc289594-c992-4a94-b2b5-70c35f8bc2bb {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1529.121519] env[61868]: WARNING nova.compute.manager [req-944b5f88-ceed-4fea-b897-0b7103b7359b req-e2136d10-f278-49c9-81bc-976cd99f64e2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Received unexpected event network-vif-plugged-dc289594-c992-4a94-b2b5-70c35f8bc2bb for instance with vm_state building and task_state spawning. [ 1529.279441] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Successfully updated port: dc289594-c992-4a94-b2b5-70c35f8bc2bb {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1529.291005] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "refresh_cache-d69cb3f5-b385-432a-b562-87d0b1b0877b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1529.291172] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquired lock "refresh_cache-d69cb3f5-b385-432a-b562-87d0b1b0877b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1529.291320] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1529.373888] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1529.622331] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Updating instance_info_cache with network_info: [{"id": "dc289594-c992-4a94-b2b5-70c35f8bc2bb", "address": "fa:16:3e:de:85:e7", "network": {"id": "537d7081-e44c-4edb-86e4-6fbca8ba72fc", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2073255524-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "20b6021bf3ad4926b12172b01abb66df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc289594-c9", "ovs_interfaceid": "dc289594-c992-4a94-b2b5-70c35f8bc2bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.637006] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Releasing lock "refresh_cache-d69cb3f5-b385-432a-b562-87d0b1b0877b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1529.637347] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Instance network_info: |[{"id": "dc289594-c992-4a94-b2b5-70c35f8bc2bb", "address": "fa:16:3e:de:85:e7", "network": {"id": "537d7081-e44c-4edb-86e4-6fbca8ba72fc", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2073255524-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "20b6021bf3ad4926b12172b01abb66df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc289594-c9", "ovs_interfaceid": "dc289594-c992-4a94-b2b5-70c35f8bc2bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
1529.638101] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:85:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc289594-c992-4a94-b2b5-70c35f8bc2bb', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.646387] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Creating folder: Project (20b6021bf3ad4926b12172b01abb66df). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1529.647010] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9ba1220-f4d2-43e4-ad8a-7081fef25d9d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.659216] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Created folder: Project (20b6021bf3ad4926b12172b01abb66df) in parent group-v18181. [ 1529.659420] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Creating folder: Instances. Parent ref: group-v18282. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1529.659671] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68e1a0b5-6ff0-460d-8e19-01cfaf6f1089 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.669952] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Created folder: Instances in parent group-v18282. [ 1529.670209] env[61868]: DEBUG oslo.service.loopingcall [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.670406] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1529.670617] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a90c293f-2b9c-4bca-a2de-38cb3d26218a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.703434] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.703434] env[61868]: value = "task-41127" [ 1529.703434] env[61868]: _type = "Task" [ 1529.703434] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.713341] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41127, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.213965] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41127, 'name': CreateVM_Task, 'duration_secs': 0.352615} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.214246] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1530.214760] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1530.214984] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1530.218433] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efa7a93-d9ae-40e3-959f-f629f92f337e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.263884] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Reconfiguring VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1530.264332] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-424fa22f-4514-403b-98a8-ebe91acee3dc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.281816] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e 
tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Waiting for the task: (returnval){ [ 1530.281816] env[61868]: value = "task-41128" [ 1530.281816] env[61868]: _type = "Task" [ 1530.281816] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.292613] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Task: {'id': task-41128, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.792903] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Task: {'id': task-41128, 'name': ReconfigVM_Task, 'duration_secs': 0.136955} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.793252] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Reconfigured VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1530.793498] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.578s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1530.793761] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1530.793906] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1530.794220] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1530.794487] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49a69906-b4d8-4e6e-8496-876f93fc336f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1530.800372] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Waiting for the task: (returnval){ [ 1530.800372] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]528776c7-d33b-3eb7-9335-432079a8f560" [ 1530.800372] env[61868]: _type = "Task" [ 1530.800372] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.810104] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]528776c7-d33b-3eb7-9335-432079a8f560, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.149787] env[61868]: DEBUG nova.compute.manager [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Received event network-changed-dc289594-c992-4a94-b2b5-70c35f8bc2bb {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1531.149980] env[61868]: DEBUG nova.compute.manager [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Refreshing instance network info cache due to event network-changed-dc289594-c992-4a94-b2b5-70c35f8bc2bb. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1531.150186] env[61868]: DEBUG oslo_concurrency.lockutils [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] Acquiring lock "refresh_cache-d69cb3f5-b385-432a-b562-87d0b1b0877b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1531.150320] env[61868]: DEBUG oslo_concurrency.lockutils [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] Acquired lock "refresh_cache-d69cb3f5-b385-432a-b562-87d0b1b0877b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1531.150472] env[61868]: DEBUG nova.network.neutron [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Refreshing network info cache for port dc289594-c992-4a94-b2b5-70c35f8bc2bb {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1531.315460] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1531.315813] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1531.315938] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1531.516441] env[61868]: DEBUG nova.network.neutron [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Updated VIF entry in instance network info cache for port dc289594-c992-4a94-b2b5-70c35f8bc2bb. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1531.516750] env[61868]: DEBUG nova.network.neutron [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Updating instance_info_cache with network_info: [{"id": "dc289594-c992-4a94-b2b5-70c35f8bc2bb", "address": "fa:16:3e:de:85:e7", "network": {"id": "537d7081-e44c-4edb-86e4-6fbca8ba72fc", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2073255524-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "20b6021bf3ad4926b12172b01abb66df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc289594-c9", "ovs_interfaceid": "dc289594-c992-4a94-b2b5-70c35f8bc2bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.528671] env[61868]: DEBUG oslo_concurrency.lockutils [req-8f22372d-129c-440f-b5c1-7d38692ba27d req-02c13391-8b46-4a15-b8a0-d24e2cc2bbc2 service nova] Releasing lock "refresh_cache-d69cb3f5-b385-432a-b562-87d0b1b0877b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1533.904065] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1536.843440] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquiring lock "4a360bf5-a22e-4e94-9274-be83ea0f6e5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1536.843828] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "4a360bf5-a22e-4e94-9274-be83ea0f6e5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1547.351460] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.351258] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.351551] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1552.351551] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1552.371803] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.371999] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.372178] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.372313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.372436] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.372557] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.372676] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.372794] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.372912] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.373027] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1552.373142] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1555.351553] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.351965] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.351518] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1558.352404] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1558.363349] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1558.363555] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1558.363720] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1558.363871] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1558.365008] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a185b0-1c2d-467c-b95a-46b045d899a8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.374051] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fc04bf-a1a4-4730-96bb-3b472c04f54e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.388505] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd5fb3d-5b5f-4c2f-8505-3afdac2619b5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.395785] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e32e9ed-ff4e-4616-8745-e5ed9a49bb7a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.424894] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181949MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1558.425171] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1558.425253] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1558.492606] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.492761] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.492886] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.493009] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.493129] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.493265] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.493383] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.493534] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.493695] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.493824] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.505172] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 11a56664-d4bc-4090-8de0-52cfeb8f37e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.515275] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.524586] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 51d7ab97-1a1c-49e6-a989-cb1b49e30d2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.534572] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f92ea472-0354-45a7-a29a-569faaf460f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.544430] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.555735] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 19162a19-9d7c-49b7-ad55-948d2126a61b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.566621] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0628d2ec-4e42-4bd2-a819-a6f8e2252469 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.578665] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f85e2a53-adea-4581-b453-2b96af0ebc70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.590221] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a360bf5-a22e-4e94-9274-be83ea0f6e5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.590424] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1558.590565] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1558.832606] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7d2e6b-d9ab-4e85-85c2-57616093921b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.840436] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afc92d2-5da8-4fdb-9e23-0c2968971cc4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.870106] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e8ea0d-5422-4f7b-bfff-b879f361f379 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.877503] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f065cf6e-b3ad-40ab-918b-549b1d234ff6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.890308] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1558.898914] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1558.915273] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1558.915512] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.490s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1559.914782] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1561.351689] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.351613] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.351937] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1576.339507] env[61868]: WARNING oslo_vmware.rw_handles [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1576.339507] env[61868]: ERROR oslo_vmware.rw_handles [ 1576.340148] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1576.342337] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1576.342609] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Copying Virtual Disk [datastore2] vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/59791cab-c01d-45f8-9547-1057cd6ab90f/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1576.342935] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a30e40c7-9493-421a-9983-660838b794e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.353024] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 
tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Waiting for the task: (returnval){ [ 1576.353024] env[61868]: value = "task-41129" [ 1576.353024] env[61868]: _type = "Task" [ 1576.353024] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.364873] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Task: {'id': task-41129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.863635] env[61868]: DEBUG oslo_vmware.exceptions [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1576.863928] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1576.864540] env[61868]: ERROR nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1576.864540] env[61868]: Faults: ['InvalidArgument'] [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Traceback (most recent call last): [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] yield resources [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self.driver.spawn(context, instance, image_meta, [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self._fetch_image_if_missing(context, vi) [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] image_cache(vi, tmp_image_ds_loc) [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] vm_util.copy_virtual_disk( [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] session._wait_for_task(vmdk_copy_task) [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] return self.wait_for_task(task_ref) [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] return evt.wait() [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] result = hub.switch() [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] return self.greenlet.switch() [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self.f(*self.args, **self.kw) [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] raise exceptions.translate_fault(task_info.error) [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Faults: ['InvalidArgument'] [ 1576.864540] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] [ 1576.865737] env[61868]: INFO nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: 
b2dbce45-4bfa-4356-b608-e44e5a15c081] Terminating instance [ 1576.866517] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1576.866719] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1576.866955] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9bc04ba-6381-401f-a3ce-48d73d9202c5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.869315] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1576.869506] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1576.870224] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c916b7a9-6c50-4f92-82b3-b484b49f8b9c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.877206] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1576.877448] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00560645-742e-44b4-91fb-c1b2af0a238a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.879608] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1576.879779] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1576.880717] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63adbc88-73bc-4406-8738-3789d26e562e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.886281] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1576.886281] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]521327db-05ca-f77a-fdde-234b7643c734" [ 1576.886281] env[61868]: _type = "Task" [ 1576.886281] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.893790] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]521327db-05ca-f77a-fdde-234b7643c734, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.944428] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1576.944651] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1576.944830] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Deleting the datastore file [datastore2] b2dbce45-4bfa-4356-b608-e44e5a15c081 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1576.945128] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e4b82b5-00a0-4af4-8832-0ea6f8f14891 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.951867] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Waiting for the task: (returnval){ [ 1576.951867] env[61868]: value = "task-41131" [ 1576.951867] env[61868]: _type = "Task" [ 1576.951867] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.959597] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Task: {'id': task-41131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.397085] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1577.397464] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1577.397572] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-beca86f9-cf16-4d14-8fdb-a0c260e9feb3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.409465] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1577.409681] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Fetch image to [datastore2] vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1577.409851] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1577.410618] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163f1b86-bdf4-4f80-8cb7-8fa18e3334fc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.419681] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeaa74cc-e0c0-4e53-968f-fb380bb3ad64 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.429837] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412e651b-5c0b-40dc-83dc-27a6f1d2f690 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.462967] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9452496c-8dd9-47c2-836e-7d300c99d395 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.472434] env[61868]: DEBUG oslo_vmware.api [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Task: {'id': task-41131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078365} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.473953] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1577.474145] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1577.474316] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1577.474490] env[61868]: INFO nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Took 0.61 seconds to destroy the instance on the hypervisor. 
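
The DeleteDatastoreFile_Task entries above show oslo.vmware's generic task-handling pattern end to end: wait_for_task (api.py:397) registers the task, _poll_task (api.py:434) logs "progress is N%." on each poll, and the completion entry (api.py:444) reports duration_secs once vCenter marks the task successful. A minimal, self-contained sketch of that loop follows; it is illustrative only, not the library's actual code, and fetch_task_info() is a hypothetical stand-in for the PropertyCollector call the real library makes against vCenter:

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real interval is configurable

    def wait_for_task(task_id, fetch_task_info):
        """Poll a vCenter-style task until it reports success or error."""
        while True:
            info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 5}
            if info['state'] == 'success':
                print(f"Task: {task_id} completed successfully.")
                return info.get('result')
            if info['state'] == 'error':
                # Corresponds to 'raise exceptions.translate_fault(task_info.error)'
                # in the tracebacks elsewhere in this log.
                raise RuntimeError(info.get('error', 'task failed'))
            print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(POLL_INTERVAL)

Read against this model, the poll of task-41131 that reports 0% is one pass through the loop, and the later entry with 'duration_secs': 0.078365 is the pass that observes the success state and returns.
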
[ 1577.476248] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a5acc02a-4fce-4bf2-9773-7fa78981436b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.478151] env[61868]: DEBUG nova.compute.claims [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1577.478326] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1577.478538] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1577.504907] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1577.558093] env[61868]: DEBUG oslo_vmware.rw_handles [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1577.616265] env[61868]: DEBUG oslo_vmware.rw_handles [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1577.616443] env[61868]: DEBUG oslo_vmware.rw_handles [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1577.805999] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610165b3-69f4-400c-bc5b-d2091f33e2f4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.813398] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562a17e0-f1cf-432c-8aed-925fb0e021e0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.842592] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c9c7de-3171-4378-8c63-bbd56bece86d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.851308] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31235673-e712-4703-aa8b-7033a2991720 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.864786] env[61868]: DEBUG nova.compute.provider_tree [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.873985] env[61868]: DEBUG nova.scheduler.client.report [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1577.892361] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.414s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1577.892980] env[61868]: ERROR nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1577.892980] env[61868]: Faults: ['InvalidArgument'] [ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Traceback (most recent call last): [ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self.driver.spawn(context, instance, image_meta,
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self._fetch_image_if_missing(context, vi)
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] image_cache(vi, tmp_image_ds_loc)
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] vm_util.copy_virtual_disk(
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] session._wait_for_task(vmdk_copy_task)
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] return self.wait_for_task(task_ref)
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] return evt.wait()
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] result = hub.switch()
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] return self.greenlet.switch()
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] self.f(*self.args, **self.kw)
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] raise exceptions.translate_fault(task_info.error)
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Faults: ['InvalidArgument']
[ 1577.892980] env[61868]: ERROR nova.compute.manager [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081]
[ 1577.893886] env[61868]: DEBUG nova.compute.utils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1577.895269] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Build of instance b2dbce45-4bfa-4356-b608-e44e5a15c081 was re-scheduled: A specified parameter was not correct: fileType
[ 1577.895269] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1577.895649] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1577.895822] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1577.895990] env[61868]: DEBUG nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1577.896211] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1578.152562] env[61868]: DEBUG nova.network.neutron [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.166284] env[61868]: INFO nova.compute.manager [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Took 0.27 seconds to deallocate network for instance. [ 1578.269582] env[61868]: INFO nova.scheduler.client.report [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Deleted allocations for instance b2dbce45-4bfa-4356-b608-e44e5a15c081 [ 1578.289611] env[61868]: DEBUG oslo_concurrency.lockutils [None req-af3d0431-9866-493d-8f3d-8f42888a9169 tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 578.669s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1578.290790] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 428.630s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1578.291058] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1578.291233] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1578.291889] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 381.995s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1578.292157] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Acquiring lock "b2dbce45-4bfa-4356-b608-e44e5a15c081-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1578.292417] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1578.292521] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1578.294309] env[61868]: INFO nova.compute.manager [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Terminating instance [ 1578.295964] env[61868]: DEBUG nova.compute.manager [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1578.296168] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1578.296426] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33971c45-6ff9-4881-8edc-86f58e005409 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.307385] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ec2397-a4c0-4c85-8d2e-d76933120c02 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.319276] env[61868]: DEBUG nova.compute.manager [None req-81602054-c433-4e16-bd4f-521214876c1f tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: 352b5eae-b809-4cb5-8970-ebad9fba78bc] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1578.344987] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b2dbce45-4bfa-4356-b608-e44e5a15c081 could not be found. [ 1578.345212] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1578.345395] env[61868]: INFO nova.compute.manager [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1578.345905] env[61868]: DEBUG oslo.service.loopingcall [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1578.345905] env[61868]: DEBUG nova.compute.manager [-] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1578.346060] env[61868]: DEBUG nova.network.neutron [-] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1578.349232] env[61868]: DEBUG nova.compute.manager [None req-81602054-c433-4e16-bd4f-521214876c1f tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: 352b5eae-b809-4cb5-8970-ebad9fba78bc] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1578.377403] env[61868]: DEBUG oslo_concurrency.lockutils [None req-81602054-c433-4e16-bd4f-521214876c1f tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "352b5eae-b809-4cb5-8970-ebad9fba78bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.498s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1578.382988] env[61868]: DEBUG nova.network.neutron [-] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.391023] env[61868]: DEBUG nova.compute.manager [None req-22f999bb-c19e-430e-97c1-138316b1186d tempest-ServerPasswordTestJSON-936178413 tempest-ServerPasswordTestJSON-936178413-project-member] [instance: 11a56664-d4bc-4090-8de0-52cfeb8f37e3] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1578.395771] env[61868]: INFO nova.compute.manager [-] [instance: b2dbce45-4bfa-4356-b608-e44e5a15c081] Took 0.05 seconds to deallocate network for instance. [ 1578.432148] env[61868]: DEBUG nova.compute.manager [None req-22f999bb-c19e-430e-97c1-138316b1186d tempest-ServerPasswordTestJSON-936178413 tempest-ServerPasswordTestJSON-936178413-project-member] [instance: 11a56664-d4bc-4090-8de0-52cfeb8f37e3] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1578.462671] env[61868]: DEBUG oslo_concurrency.lockutils [None req-22f999bb-c19e-430e-97c1-138316b1186d tempest-ServerPasswordTestJSON-936178413 tempest-ServerPasswordTestJSON-936178413-project-member] Lock "11a56664-d4bc-4090-8de0-52cfeb8f37e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.036s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1578.477854] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1578.505673] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a0e6f152-3a1e-4173-a369-13163758e35a tempest-ServerRescueTestJSONUnderV235-65706538 tempest-ServerRescueTestJSONUnderV235-65706538-project-member] Lock "b2dbce45-4bfa-4356-b608-e44e5a15c081" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.214s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1578.540720] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1578.541094] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1578.542511] env[61868]: INFO nova.compute.claims [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1578.823057] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca2c568-6e50-4708-8624-59d5bfad9bc1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.832467] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccc4c71-4b69-49f6-a217-521a4d1be86a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.862310] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227ab8a0-a436-4448-9a43-4134a55a25b0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.870413] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eff8671-999f-4629-882a-523423db822d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.884814] env[61868]: DEBUG nova.compute.provider_tree [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.895522] env[61868]: DEBUG nova.scheduler.client.report [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1578.911681] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.371s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1578.912401] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1578.959936] env[61868]: DEBUG nova.compute.utils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1578.962506] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Not allocating networking since 'none' was specified. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1578.975172] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1579.047994] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1579.071233] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1579.071502] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1579.071662] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1579.071844] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1579.072090] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1579.072269] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1579.072480] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1579.072641] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1579.072804] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 
tempest-ServerShowV247Test-720549136-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1579.072966] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1579.073141] env[61868]: DEBUG nova.virt.hardware [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1579.074069] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49b1b3b-51c0-4957-9de2-190376fd8972 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.082227] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa37c40-168d-4e6c-8d7d-7c488c71e8f9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.097576] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1579.102965] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Creating folder: Project (7357796466124ba883a96248a65a58fd). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1579.103268] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-901c9403-bf02-41d9-8157-c251ddef4693 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.114468] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Created folder: Project (7357796466124ba883a96248a65a58fd) in parent group-v18181. [ 1579.114679] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Creating folder: Instances. Parent ref: group-v18285. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1579.114935] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91f0cc19-25d5-4371-9f74-11f20ea3800f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.124821] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Created folder: Instances in parent group-v18285. 
[ 1579.125076] env[61868]: DEBUG oslo.service.loopingcall [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1579.125272] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1579.125510] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c604a812-8b8e-41a7-a82e-49232dfec3e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.143135] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1579.143135] env[61868]: value = "task-41134" [ 1579.143135] env[61868]: _type = "Task" [ 1579.143135] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.151198] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41134, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.653868] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41134, 'name': CreateVM_Task, 'duration_secs': 0.263634} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.654169] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1579.654338] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1579.654570] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1579.657507] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15019fab-9c2a-4954-8acc-8737c78eff2e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.706002] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1579.706462] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20372c31-b9d0-4408-895b-1b618ba9e10a {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.726221] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Waiting for the task: (returnval){ [ 1579.726221] env[61868]: value = "task-41135" [ 1579.726221] env[61868]: _type = "Task" [ 1579.726221] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.734627] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Task: {'id': task-41135, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.236638] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Task: {'id': task-41135, 'name': ReconfigVM_Task, 'duration_secs': 0.111221} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.237095] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1580.237364] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.583s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1580.237721] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1580.237900] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1580.238256] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1580.238564] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d87ca76-8da1-4fa3-9a24-d1a36e24742a {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.246657] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Waiting for the task: (returnval){ [ 1580.246657] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]521450a8-5e27-5e94-75c0-2f953c24d8c7" [ 1580.246657] env[61868]: _type = "Task" [ 1580.246657] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.263981] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1580.264402] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1580.264669] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1584.868472] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1584.868771] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1590.754427] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "97840d8b-90ee-432e-988a-30548b61381b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1592.423264] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "36be944d-04bc-45cd-8019-173437f8ffa5" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1592.423659] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "36be944d-04bc-45cd-8019-173437f8ffa5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1608.351441] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.351340] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.351677] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1614.351677] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1614.372158] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.372343] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.372461] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.372661] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.372727] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.372911] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.372911] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.373044] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.373138] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.373243] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1614.373381] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1616.369592] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1616.516994] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "e453b684-a54b-46b3-b9ea-4ab9352965f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1616.517600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1617.351471] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1618.351804] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1619.347053] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1619.367867] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1619.377152] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1619.377385] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1619.377551] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1619.377701] 
env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1619.378760] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7576336-225a-4cea-9090-074700a85245 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.388253] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a05344-6c9f-4f8f-b0d1-a7540524efdd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.402986] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efada51-9b29-4c46-b8f2-22f9906a477c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.409492] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bb5ca6-9d87-4e69-b83c-9eab79b72d0e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.442167] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181939MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1619.442167] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1619.442167] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1619.516712] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.516873] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.516999] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.517122] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.517264] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.517350] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.517465] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.517578] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.517692] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.517803] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1619.530902] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.542431] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 19162a19-9d7c-49b7-ad55-948d2126a61b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.553309] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 0628d2ec-4e42-4bd2-a819-a6f8e2252469 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.566407] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance f85e2a53-adea-4581-b453-2b96af0ebc70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.576053] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a360bf5-a22e-4e94-9274-be83ea0f6e5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.586037] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.597442] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.608357] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1619.608598] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1619.608746] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1619.836269] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be772c5e-7304-4c9c-9a2c-c1c1545a1afb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.843905] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2d89ed-d216-4792-a34b-ae505c5673ee {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.876421] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00091f2-7db3-4316-87b9-09ebe2720419 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.884627] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e49bf8-45ac-4333-82cd-25c88f41af41 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.898572] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.907416] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1619.925948] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1619.926149] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.485s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1620.910103] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1623.351275] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.351121] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.351304] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1625.755783] env[61868]: WARNING oslo_vmware.rw_handles [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1625.755783] env[61868]: ERROR oslo_vmware.rw_handles [ 1625.756974] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store 
datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1625.758195] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1625.758433] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Copying Virtual Disk [datastore2] vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/f665b020-3458-407a-b8af-a3a7b9c934c0/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1625.758715] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1c4c5b9-a981-4053-980e-8335b076fed2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.768102] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1625.768102] env[61868]: value = "task-41136" [ 1625.768102] env[61868]: _type = "Task" [ 1625.768102] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.776176] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.279370] env[61868]: DEBUG oslo_vmware.exceptions [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1626.279727] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1626.280257] env[61868]: ERROR nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1626.280257] env[61868]: Faults: ['InvalidArgument'] [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Traceback (most recent call last): [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] yield resources [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self.driver.spawn(context, instance, image_meta, [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self._fetch_image_if_missing(context, vi) [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] image_cache(vi, tmp_image_ds_loc) [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] vm_util.copy_virtual_disk( [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] session._wait_for_task(vmdk_copy_task) [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] return self.wait_for_task(task_ref) [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] return evt.wait() [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] result = hub.switch() [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] return self.greenlet.switch() [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self.f(*self.args, **self.kw) [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] raise exceptions.translate_fault(task_info.error) [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Faults: ['InvalidArgument'] [ 1626.280257] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] [ 1626.281433] env[61868]: INFO nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Terminating instance [ 1626.283496] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1626.283696] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1626.283998] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1626.284246] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1626.284984] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4de3c2-a5a4-4285-80eb-df904ff77dbf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.287687] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c81b9932-478a-4019-8362-76bbd52252b0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.294056] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1626.294342] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e3a5924-984a-46b2-a45a-777785884424 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.297414] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1626.297577] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1626.298509] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f6c0695-72fe-4cac-ac9f-b138bfa2c396 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.303612] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1626.303612] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]526c13a4-44f9-bb88-4953-46fd6d3d3f0c" [ 1626.303612] env[61868]: _type = "Task" [ 1626.303612] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.312044] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]526c13a4-44f9-bb88-4953-46fd6d3d3f0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.372388] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1626.372781] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1626.373064] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleting the datastore file [datastore2] 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1626.373405] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a6865a0-4ee1-47f4-a959-bcc6d9c9a569 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.380358] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1626.380358] env[61868]: value = "task-41138" [ 1626.380358] env[61868]: _type = "Task" [ 1626.380358] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.389004] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41138, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.813933] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1626.814341] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore2] vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1626.814431] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-498d3756-c0ea-47b2-99e3-cac88380e541 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.825732] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore2] vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1626.825923] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Fetch image to [datastore2] vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1626.826096] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1626.826831] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75044cbe-9870-4a37-bf2d-2250f84120d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.833552] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b5982c-921f-4893-86b4-5742731d6401 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.843015] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eac588f-c8a6-4fa2-acc6-e052f458ec04 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.874317] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b2e868-f2cf-47a6-83f1-4387a0c70d8f {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.883840] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-39d1ea80-a67d-4af0-a227-038821e29fb0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.890505] env[61868]: DEBUG oslo_vmware.api [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068258} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.890757] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1626.890972] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1626.891136] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1626.891313] env[61868]: INFO nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1626.893457] env[61868]: DEBUG nova.compute.claims [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1626.893630] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1626.893838] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1626.905954] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1626.961809] env[61868]: DEBUG oslo_vmware.rw_handles [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1627.019773] env[61868]: DEBUG oslo_vmware.rw_handles [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1627.020035] env[61868]: DEBUG oslo_vmware.rw_handles [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1627.218714] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4905bb-3d6f-4efc-821e-19abcbd5ce69 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.226880] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08e9401-6627-4c6a-b1a0-b856750d1753 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.261136] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6d2ca4-e761-403a-9402-128a41227c3c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.269651] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b1813b-0e4a-45fa-844f-d89d75d61f81 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.284684] env[61868]: DEBUG nova.compute.provider_tree [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.294188] env[61868]: DEBUG nova.scheduler.client.report [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1627.311517] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.417s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.312197] env[61868]: ERROR nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.312197] env[61868]: Faults: ['InvalidArgument'] [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Traceback (most recent call last): [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1627.312197] 
env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self.driver.spawn(context, instance, image_meta, [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self._fetch_image_if_missing(context, vi) [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] image_cache(vi, tmp_image_ds_loc) [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] vm_util.copy_virtual_disk( [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] session._wait_for_task(vmdk_copy_task) [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] return self.wait_for_task(task_ref) [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] return evt.wait() [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] result = hub.switch() [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] return self.greenlet.switch() [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] self.f(*self.args, **self.kw) [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] raise exceptions.translate_fault(task_info.error) [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Faults: ['InvalidArgument'] [ 1627.312197] env[61868]: ERROR nova.compute.manager [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] [ 1627.313505] env[61868]: DEBUG nova.compute.utils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1627.315204] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Build of instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d was re-scheduled: A specified parameter was not correct: fileType [ 1627.315204] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1627.315666] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1627.315891] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1627.316129] env[61868]: DEBUG nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1627.316394] env[61868]: DEBUG nova.network.neutron [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1627.589061] env[61868]: DEBUG nova.network.neutron [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.602830] env[61868]: INFO nova.compute.manager [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Took 0.29 seconds to deallocate network for instance. [ 1627.706243] env[61868]: INFO nova.scheduler.client.report [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted allocations for instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d [ 1627.724615] env[61868]: DEBUG oslo_concurrency.lockutils [None req-040f08fb-5eec-460d-b38f-2501476783e1 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.169s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.725774] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.692s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1627.725995] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1627.726342] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1627.726445] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.728316] env[61868]: INFO nova.compute.manager [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Terminating instance [ 1627.730291] env[61868]: DEBUG nova.compute.manager [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1627.730632] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1627.731089] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d95ea079-d00f-4b5d-b3f0-e5142370f6b7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.743337] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919e6a7e-79c4-4e34-8ebe-34e9b0b17d2e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.755028] env[61868]: DEBUG nova.compute.manager [None req-2a9fdf4d-83d0-43c8-ac33-d6af08d4e0d1 tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 51d7ab97-1a1c-49e6-a989-cb1b49e30d2e] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1627.780489] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d could not be found. 
[ 1627.780995] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1627.780995] env[61868]: INFO nova.compute.manager [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1627.781167] env[61868]: DEBUG oslo.service.loopingcall [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1627.781402] env[61868]: DEBUG nova.compute.manager [-] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1627.781497] env[61868]: DEBUG nova.network.neutron [-] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1627.786076] env[61868]: DEBUG nova.compute.manager [None req-2a9fdf4d-83d0-43c8-ac33-d6af08d4e0d1 tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 51d7ab97-1a1c-49e6-a989-cb1b49e30d2e] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1627.810370] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2a9fdf4d-83d0-43c8-ac33-d6af08d4e0d1 tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "51d7ab97-1a1c-49e6-a989-cb1b49e30d2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.607s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.823135] env[61868]: DEBUG nova.compute.manager [None req-a7b2802f-e138-4b2d-80c5-6dbe5aadd44d tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: f92ea472-0354-45a7-a29a-569faaf460f0] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1627.827199] env[61868]: DEBUG nova.network.neutron [-] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.834984] env[61868]: INFO nova.compute.manager [-] [instance: 8ee97cc3-458c-4fd5-8f67-f7e877b44b0d] Took 0.05 seconds to deallocate network for instance. [ 1627.877919] env[61868]: DEBUG nova.compute.manager [None req-a7b2802f-e138-4b2d-80c5-6dbe5aadd44d tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: f92ea472-0354-45a7-a29a-569faaf460f0] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1627.915692] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a7b2802f-e138-4b2d-80c5-6dbe5aadd44d tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "f92ea472-0354-45a7-a29a-569faaf460f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.115s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.934011] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1627.959940] env[61868]: DEBUG oslo_concurrency.lockutils [None req-8673bbf2-d4c4-4289-b648-0f022772fe01 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "8ee97cc3-458c-4fd5-8f67-f7e877b44b0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.234s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.991964] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1627.992261] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1627.993745] env[61868]: INFO nova.compute.claims [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1628.309774] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674953b0-08f2-4e37-a055-6957c047d389 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.319015] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c54c2c-a0d4-4999-bdea-395b48628a66 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.351876] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a58cf94-cfdc-4c3a-b482-3b3e83d0ff25 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.361586] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523a42ff-5ef5-427a-86e5-1d412dbd284e 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.376614] env[61868]: DEBUG nova.compute.provider_tree [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.385552] env[61868]: DEBUG nova.scheduler.client.report [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1628.405699] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.413s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1628.406249] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1628.441827] env[61868]: DEBUG nova.compute.utils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1628.444244] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1628.444484] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1628.460035] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1628.496610] env[61868]: DEBUG nova.policy [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '060444b5c1fb4d999c2b3a774e09ca78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a418f11ddf3b47f7ab0cc46234c91d44', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1628.536032] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1628.560184] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=<?>,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=<?>,size=50659328,status='active',tags=<?>,updated_at=2024-02-13T12:42:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1628.560669] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1628.560843] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1628.561070] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1628.561228] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1628.561376] env[61868]: DEBUG nova.virt.hardware [None
req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1628.561589] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1628.561752] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1628.561921] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1628.562101] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1628.562291] env[61868]: DEBUG nova.virt.hardware [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1628.563183] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708407eb-9b8f-4705-a6ea-dcb5da66b268 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.572933] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba377886-5b8a-41b1-9116-e39ea5425668 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.801601] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Successfully created port: 6d47af2d-c7a5-4708-ace3-8c311664b936 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1629.407505] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Successfully updated port: 6d47af2d-c7a5-4708-ace3-8c311664b936 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1629.423096] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f 
tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "refresh_cache-b1473dd0-5427-496c-a94c-5772635b229f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1629.423281] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquired lock "refresh_cache-b1473dd0-5427-496c-a94c-5772635b229f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1629.423385] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1629.483161] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1629.648439] env[61868]: DEBUG nova.compute.manager [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Received event network-vif-plugged-6d47af2d-c7a5-4708-ace3-8c311664b936 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1629.648642] env[61868]: DEBUG oslo_concurrency.lockutils [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] Acquiring lock "b1473dd0-5427-496c-a94c-5772635b229f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1629.648862] env[61868]: DEBUG oslo_concurrency.lockutils [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] Lock "b1473dd0-5427-496c-a94c-5772635b229f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1629.649071] env[61868]: DEBUG oslo_concurrency.lockutils [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] Lock "b1473dd0-5427-496c-a94c-5772635b229f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1629.649248] env[61868]: DEBUG nova.compute.manager [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f] No waiting events found dispatching network-vif-plugged-6d47af2d-c7a5-4708-ace3-8c311664b936 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1629.649409] env[61868]: WARNING nova.compute.manager [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f]
Received unexpected event network-vif-plugged-6d47af2d-c7a5-4708-ace3-8c311664b936 for instance with vm_state building and task_state spawning. [ 1629.649567] env[61868]: DEBUG nova.compute.manager [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Received event network-changed-6d47af2d-c7a5-4708-ace3-8c311664b936 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1629.649713] env[61868]: DEBUG nova.compute.manager [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Refreshing instance network info cache due to event network-changed-6d47af2d-c7a5-4708-ace3-8c311664b936. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1629.649872] env[61868]: DEBUG oslo_concurrency.lockutils [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] Acquiring lock "refresh_cache-b1473dd0-5427-496c-a94c-5772635b229f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1629.885453] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Updating instance_info_cache with network_info: [{"id": "6d47af2d-c7a5-4708-ace3-8c311664b936", "address": "fa:16:3e:69:4d:49", "network": {"id": "02a8ab26-0c89-492d-b26e-6d557da8b717", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-549270901-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a418f11ddf3b47f7ab0cc46234c91d44", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d47af2d-c7", "ovs_interfaceid": "6d47af2d-c7a5-4708-ace3-8c311664b936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.900556] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Releasing lock "refresh_cache-b1473dd0-5427-496c-a94c-5772635b229f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1629.900855] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Instance network_info: |[{"id": "6d47af2d-c7a5-4708-ace3-8c311664b936", "address": "fa:16:3e:69:4d:49", "network": {"id": "02a8ab26-0c89-492d-b26e-6d557da8b717", "bridge": 
"br-int", "label": "tempest-MultipleCreateTestJSON-549270901-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a418f11ddf3b47f7ab0cc46234c91d44", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d47af2d-c7", "ovs_interfaceid": "6d47af2d-c7a5-4708-ace3-8c311664b936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1629.901243] env[61868]: DEBUG oslo_concurrency.lockutils [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] Acquired lock "refresh_cache-b1473dd0-5427-496c-a94c-5772635b229f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1629.901428] env[61868]: DEBUG nova.network.neutron [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Refreshing network info cache for port 6d47af2d-c7a5-4708-ace3-8c311664b936 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1629.903306] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:4d:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d47af2d-c7a5-4708-ace3-8c311664b936', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1629.911000] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Creating folder: Project (a418f11ddf3b47f7ab0cc46234c91d44). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1629.915087] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a90128e6-3742-465e-9196-d09a5cbdf2a0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.928896] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Created folder: Project (a418f11ddf3b47f7ab0cc46234c91d44) in parent group-v18181. 
[ 1629.929166] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Creating folder: Instances. Parent ref: group-v18288. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1629.929423] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57eb4f10-a58b-4d10-a1ce-c1f96b61f05f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.939847] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Created folder: Instances in parent group-v18288. [ 1629.940123] env[61868]: DEBUG oslo.service.loopingcall [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1629.940328] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1629.940536] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3342baf3-cfe3-49cd-a026-e13781ebb359 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.964567] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1629.964567] env[61868]: value = "task-41141" [ 1629.964567] env[61868]: _type = "Task" [ 1629.964567] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.972856] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41141, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.159054] env[61868]: DEBUG nova.network.neutron [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Updated VIF entry in instance network info cache for port 6d47af2d-c7a5-4708-ace3-8c311664b936. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1630.159485] env[61868]: DEBUG nova.network.neutron [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Updating instance_info_cache with network_info: [{"id": "6d47af2d-c7a5-4708-ace3-8c311664b936", "address": "fa:16:3e:69:4d:49", "network": {"id": "02a8ab26-0c89-492d-b26e-6d557da8b717", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-549270901-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a418f11ddf3b47f7ab0cc46234c91d44", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d47af2d-c7", "ovs_interfaceid": "6d47af2d-c7a5-4708-ace3-8c311664b936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.171288] env[61868]: DEBUG oslo_concurrency.lockutils [req-4ac222bb-0def-4adb-9588-010bb54794be req-dc9e5e0c-698c-4cbb-839e-b89befae40f3 service nova] Releasing lock "refresh_cache-b1473dd0-5427-496c-a94c-5772635b229f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1630.476320] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41141, 'name': CreateVM_Task, 'duration_secs': 0.307315} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.476706] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1630.477098] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1630.477403] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1630.480249] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37dea6b0-7b53-4ce4-a460-e032b69b0b89 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.513814] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Reconfiguring VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1630.514172] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-047688c3-be6c-408f-b88e-91a702d74cce {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.530086] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Waiting for the task: (returnval){ [ 1630.530086] env[61868]: value = "task-41142" [ 1630.530086] env[61868]: _type = "Task" [ 1630.530086] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.540063] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': task-41142, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.041153] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': task-41142, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.543201] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': task-41142, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.042664] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': task-41142, 'name': ReconfigVM_Task, 'duration_secs': 1.114542} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.043033] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Reconfigured VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1632.043319] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 1.566s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1632.043632] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1632.043840] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1632.044280] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1632.044545] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b9baaed-a59b-454b-95c5-e710832bb9a1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.049091] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Waiting for the task: (returnval){ [ 1632.049091] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d1f4b0-4977-7dae-8d19-f74662a63a66" [ 
1632.049091] env[61868]: _type = "Task" [ 1632.049091] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.056565] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d1f4b0-4977-7dae-8d19-f74662a63a66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.559629] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1632.559970] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1632.560100] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1640.383129] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "b1473dd0-5427-496c-a94c-5772635b229f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1669.351802] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.352858] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.353291] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1675.353291] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1675.373785] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1]
Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.373967] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374059] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374188] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374309] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374429] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374546] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374661] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374767] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374884] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1675.374998] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1676.367730] env[61868]: WARNING oslo_vmware.rw_handles [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1676.367730] env[61868]: ERROR oslo_vmware.rw_handles [ 1676.368527] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1676.371330] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1676.371868] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Copying Virtual Disk [datastore2] vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/ad78c470-e217-4253-ab83-862e32f93f43/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1676.372283] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af831f7d-7a8b-45b6-b52a-fa2f194bb391 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.385073] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1676.385073] env[61868]: value = "task-41143" [ 
1676.385073] env[61868]: _type = "Task" [ 1676.385073] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.394577] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.588515] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1676.588758] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1676.895897] env[61868]: DEBUG oslo_vmware.exceptions [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1676.896229] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1676.896989] env[61868]: ERROR nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1676.896989] env[61868]: Faults: ['InvalidArgument'] [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Traceback (most recent call last): [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] yield resources [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self.driver.spawn(context, instance, image_meta, [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self._fetch_image_if_missing(context, vi) [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] image_cache(vi, tmp_image_ds_loc) [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] vm_util.copy_virtual_disk( [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] session._wait_for_task(vmdk_copy_task) [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] return self.wait_for_task(task_ref) [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] return evt.wait() [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] result = hub.switch() [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] return self.greenlet.switch() [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self.f(*self.args, **self.kw) [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] raise exceptions.translate_fault(task_info.error) [ 
1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Faults: ['InvalidArgument'] [ 1676.896989] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] [ 1676.898211] env[61868]: INFO nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Terminating instance [ 1676.899157] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1676.899406] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1676.900051] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1676.900249] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1676.900617] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c22e1ddc-447b-4d3d-b2d1-7bd71d05d40d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.903816] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79952e67-376b-432f-9556-e5357bf664dd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.911923] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1676.913164] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f65b7535-6001-405f-ad4a-8c5831776793 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.914760] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1676.914941] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1676.915623] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b701dbd5-341b-40c8-b4cc-49038412383c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.921481] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Waiting for the task: (returnval){ [ 1676.921481] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]522bd762-57ae-9acd-a354-e644bfad6c18" [ 1676.921481] env[61868]: _type = "Task" [ 1676.921481] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.935735] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]522bd762-57ae-9acd-a354-e644bfad6c18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.995140] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1676.995375] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1676.995545] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleting the datastore file [datastore2] f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.995829] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f62b9b8-cc1f-447e-953e-582519ab1932 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.004420] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1677.004420] env[61868]: value = "task-41145" [ 1677.004420] env[61868]: _type = "Task" [ 1677.004420] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.013414] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41145, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.369296] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1677.432654] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1677.433188] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Creating directory with path [datastore2] vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1677.433553] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84318bff-d261-4bd0-b7c1-c4664d82b9bd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.445947] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Created directory with path [datastore2] vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1677.446347] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Fetch image to [datastore2] vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1677.446705] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1677.447598] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39d13dc-1b3c-4274-bb97-c071fe6d3a49 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.454964] env[61868]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f596f7-79aa-4fbe-b416-e96b5c992747 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.464419] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c710e73-ba7b-4b2d-b5dc-d897977b1ce9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.496174] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ca7376-02ee-4995-9665-5e29705b1e27 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.503096] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1ee13a64-bf47-4241-a0c0-3613dd0f5ae3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.513442] env[61868]: DEBUG oslo_vmware.api [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41145, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085995} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.513938] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1677.514276] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1677.514586] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1677.514904] env[61868]: INFO nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Took 0.61 seconds to destroy the instance on the hypervisor. 
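(Annotation, not part of the captured log: the destroy sequence above shows the asynchronous task pattern used throughout this log. UnregisterVM is synchronous, while *_Task methods such as DeleteDatastoreFile_Task return a Task moref that wait_for_task then polls, producing the "progress is N%" / "completed successfully" entries and raising a translated fault such as the VimFaultException seen earlier when a task errors. A minimal sketch under the same assumptions as the previous one; the datacenter moref id "datacenter-2" is an assumed placeholder the log does not show.)

    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vc1.example.test', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    datacenter_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # assumed moref
    file_manager = session.vim.service_content.fileManager
    # DeleteDatastoreFile_Task returns immediately with a Task moref.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_manager,
                              name='[datastore2] f7e1c6c5-752e-4fef-b84f-232b2dbee4a1',
                              datacenter=datacenter_ref)
    # Blocks, polling every task_poll_interval, until the task succeeds;
    # on failure it raises the fault translated by oslo_vmware.exceptions.
    task_info = session.wait_for_task(task)
    print(task_info.state)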
[ 1677.517304] env[61868]: DEBUG nova.compute.claims [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1677.517622] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1677.517967] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1677.531747] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1677.583244] env[61868]: DEBUG oslo_vmware.rw_handles [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1677.648456] env[61868]: DEBUG oslo_vmware.rw_handles [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1677.648817] env[61868]: DEBUG oslo_vmware.rw_handles [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1677.810793] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27030ecf-8cf2-4d26-865b-43ea6ca73eae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.819095] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c94240-d47a-4707-b75d-ca9c4ab06658 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.848946] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489e6b5c-1cff-4df1-9a56-0c2f888114f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.857674] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c14ad6-9f8d-4bbd-8b07-98dd47e3c0b3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.872630] env[61868]: DEBUG nova.compute.provider_tree [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.882562] env[61868]: DEBUG nova.scheduler.client.report [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1677.900208] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.382s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1677.900710] env[61868]: ERROR nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1677.900710] env[61868]: Faults: ['InvalidArgument'] [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Traceback (most recent call last): [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: 
f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self.driver.spawn(context, instance, image_meta, [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self._fetch_image_if_missing(context, vi) [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] image_cache(vi, tmp_image_ds_loc) [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] vm_util.copy_virtual_disk( [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] session._wait_for_task(vmdk_copy_task) [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] return self.wait_for_task(task_ref) [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] return evt.wait() [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] result = hub.switch() [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] return self.greenlet.switch() [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] self.f(*self.args, **self.kw) [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] raise exceptions.translate_fault(task_info.error) [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Faults: ['InvalidArgument'] [ 1677.900710] env[61868]: ERROR nova.compute.manager [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] [ 1677.901767] env[61868]: DEBUG nova.compute.utils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1677.904363] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Build of instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 was re-scheduled: A specified parameter was not correct: fileType [ 1677.904363] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1677.904735] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1677.904910] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1677.905078] env[61868]: DEBUG nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1677.905243] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1678.358997] env[61868]: DEBUG nova.network.neutron [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.373691] env[61868]: INFO nova.compute.manager [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Took 0.47 seconds to deallocate network for instance. [ 1678.483648] env[61868]: INFO nova.scheduler.client.report [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted allocations for instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 [ 1678.504227] env[61868]: DEBUG oslo_concurrency.lockutils [None req-03fb4196-531f-46be-b2ef-f094465577c5 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 590.349s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1678.505469] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 394.295s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1678.505719] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1678.506002] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
1678.506238] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1678.508660] env[61868]: INFO nova.compute.manager [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Terminating instance [ 1678.512983] env[61868]: DEBUG nova.compute.manager [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1678.513357] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1678.513804] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf6da3e2-9dba-4be7-9ac7-6c9683d41c5f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.516964] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: 19162a19-9d7c-49b7-ad55-948d2126a61b] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1678.527772] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4885d7-488a-4de4-81c3-f0b14485ffdd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.544726] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: 19162a19-9d7c-49b7-ad55-948d2126a61b] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1678.557352] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f7e1c6c5-752e-4fef-b84f-232b2dbee4a1 could not be found. 
[ 1678.557640] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1678.557834] env[61868]: INFO nova.compute.manager [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1678.558159] env[61868]: DEBUG oslo.service.loopingcall [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1678.558690] env[61868]: DEBUG nova.compute.manager [-] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1678.558807] env[61868]: DEBUG nova.network.neutron [-] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1678.579267] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "19162a19-9d7c-49b7-ad55-948d2126a61b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.934s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1678.588095] env[61868]: DEBUG nova.network.neutron [-] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.591172] env[61868]: DEBUG nova.compute.manager [None req-c04bb216-defb-495c-88ec-335c7ead1a01 tempest-ServerActionsTestJSON-100355986 tempest-ServerActionsTestJSON-100355986-project-member] [instance: 0628d2ec-4e42-4bd2-a819-a6f8e2252469] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1678.597693] env[61868]: INFO nova.compute.manager [-] [instance: f7e1c6c5-752e-4fef-b84f-232b2dbee4a1] Took 0.04 seconds to deallocate network for instance. [ 1678.620933] env[61868]: DEBUG nova.compute.manager [None req-c04bb216-defb-495c-88ec-335c7ead1a01 tempest-ServerActionsTestJSON-100355986 tempest-ServerActionsTestJSON-100355986-project-member] [instance: 0628d2ec-4e42-4bd2-a819-a6f8e2252469] Instance disappeared before build. 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1678.642689] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c04bb216-defb-495c-88ec-335c7ead1a01 tempest-ServerActionsTestJSON-100355986 tempest-ServerActionsTestJSON-100355986-project-member] Lock "0628d2ec-4e42-4bd2-a819-a6f8e2252469" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.949s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1678.654258] env[61868]: DEBUG nova.compute.manager [None req-ad019fbd-f373-4808-9c8d-14078d5f26a2 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f85e2a53-adea-4581-b453-2b96af0ebc70] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1678.684729] env[61868]: DEBUG nova.compute.manager [None req-ad019fbd-f373-4808-9c8d-14078d5f26a2 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: f85e2a53-adea-4581-b453-2b96af0ebc70] Instance disappeared before build. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1678.690126] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d74f9312-c1c0-4e7e-9111-e2948880b17b tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f7e1c6c5-752e-4fef-b84f-232b2dbee4a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1678.708514] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ad019fbd-f373-4808-9c8d-14078d5f26a2 tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "f85e2a53-adea-4581-b453-2b96af0ebc70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.224s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1678.719529] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1678.771731] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1678.772017] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1678.773531] env[61868]: INFO nova.compute.claims [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1679.017156] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6400d5b-f20a-4716-836e-af14a7361c3d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.025146] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75d3cd6-e753-48a4-a9d9-87d49bebd8e0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.055318] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d076442a-2cb7-4c19-821e-d9cf2b810fc6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.063263] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfad5a4f-22c0-41ed-8695-1e57d9d435f7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.076541] env[61868]: DEBUG nova.compute.provider_tree [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.085418] env[61868]: DEBUG nova.scheduler.client.report [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1679.103886] env[61868]: DEBUG oslo_concurrency.lockutils 
[None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.332s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1679.104478] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1679.146043] env[61868]: DEBUG nova.compute.utils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1679.147544] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1679.147715] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1679.159617] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1679.200065] env[61868]: DEBUG nova.policy [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '338e5e02cb8a42cbb75a300f64e7f7c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33b068be4ffa4855ad04a7e75b130973', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1679.234460] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1679.261435] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T13:01:04Z,direct_url=,disk_format='vmdk',id=e8f68503-5106-4c31-8330-9d2da372531b,min_disk=0,min_ram=0,name='',owner='34f045f2d1234ab2b5d682391804f816',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T13:01:05Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1679.262808] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1679.262808] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1679.262808] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1679.262808] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1679.262808] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1679.263053] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1679.263163] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1679.263380] env[61868]: DEBUG nova.virt.hardware [None 
req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1679.263717] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1679.264075] env[61868]: DEBUG nova.virt.hardware [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1679.265313] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b238302f-e5a6-4ced-8b69-a8aa7fe2c910 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.277902] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f787fb-a38f-47a6-905e-3b5a4da47d35 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.351182] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1679.527649] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Successfully created port: 8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1680.093296] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Successfully updated port: 8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1680.110667] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquiring lock "refresh_cache-4a360bf5-a22e-4e94-9274-be83ea0f6e5f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1680.110813] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquired lock "refresh_cache-4a360bf5-a22e-4e94-9274-be83ea0f6e5f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1680.111023] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 
tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1680.157241] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1680.313207] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Updating instance_info_cache with network_info: [{"id": "8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d", "address": "fa:16:3e:d8:76:46", "network": {"id": "2bfbbe44-229c-4f42-abd2-1e00abdc1232", "bridge": "br-int", "label": "tempest-AttachSCSIVolumeTestJSON-1442657779-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "33b068be4ffa4855ad04a7e75b130973", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4e2ad6-76", "ovs_interfaceid": "8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.329308] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Releasing lock "refresh_cache-4a360bf5-a22e-4e94-9274-be83ea0f6e5f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1680.329776] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Instance network_info: |[{"id": "8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d", "address": "fa:16:3e:d8:76:46", "network": {"id": "2bfbbe44-229c-4f42-abd2-1e00abdc1232", "bridge": "br-int", "label": "tempest-AttachSCSIVolumeTestJSON-1442657779-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "33b068be4ffa4855ad04a7e75b130973", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4e2ad6-76", "ovs_interfaceid": "8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1680.330056] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:76:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1680.337883] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Creating folder: Project (33b068be4ffa4855ad04a7e75b130973). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1680.338597] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85c6f995-97ad-40a9-b4b1-404699d87a91 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.351914] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1680.354818] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Created folder: Project (33b068be4ffa4855ad04a7e75b130973) in parent group-v18181. [ 1680.355038] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Creating folder: Instances. Parent ref: group-v18291. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1680.355301] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ace1bc58-2121-43ab-b067-cf342c945491 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.365861] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Created folder: Instances in parent group-v18291. [ 1680.366125] env[61868]: DEBUG oslo.service.loopingcall [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1680.366333] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1680.366549] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f67914cb-9d94-4dc4-aab1-19df36dace3f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.387702] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1680.387702] env[61868]: value = "task-41148" [ 1680.387702] env[61868]: _type = "Task" [ 1680.387702] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.396677] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41148, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.402488] env[61868]: DEBUG nova.compute.manager [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Received event network-vif-plugged-8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1680.402812] env[61868]: DEBUG oslo_concurrency.lockutils [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] Acquiring lock "4a360bf5-a22e-4e94-9274-be83ea0f6e5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1680.402918] env[61868]: DEBUG oslo_concurrency.lockutils [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] Lock "4a360bf5-a22e-4e94-9274-be83ea0f6e5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1680.403078] env[61868]: DEBUG oslo_concurrency.lockutils [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] Lock "4a360bf5-a22e-4e94-9274-be83ea0f6e5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1680.403238] env[61868]: DEBUG nova.compute.manager [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] No waiting events found dispatching network-vif-plugged-8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1680.403427] env[61868]: WARNING nova.compute.manager [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Received unexpected event network-vif-plugged-8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d for instance with vm_state building and task_state spawning. 
[ 1680.403544] env[61868]: DEBUG nova.compute.manager [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Received event network-changed-8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1680.403691] env[61868]: DEBUG nova.compute.manager [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Refreshing instance network info cache due to event network-changed-8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1680.403902] env[61868]: DEBUG oslo_concurrency.lockutils [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] Acquiring lock "refresh_cache-4a360bf5-a22e-4e94-9274-be83ea0f6e5f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1680.404091] env[61868]: DEBUG oslo_concurrency.lockutils [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] Acquired lock "refresh_cache-4a360bf5-a22e-4e94-9274-be83ea0f6e5f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1680.404271] env[61868]: DEBUG nova.network.neutron [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Refreshing network info cache for port 8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1680.660263] env[61868]: DEBUG nova.network.neutron [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Updated VIF entry in instance network info cache for port 8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1680.660673] env[61868]: DEBUG nova.network.neutron [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Updating instance_info_cache with network_info: [{"id": "8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d", "address": "fa:16:3e:d8:76:46", "network": {"id": "2bfbbe44-229c-4f42-abd2-1e00abdc1232", "bridge": "br-int", "label": "tempest-AttachSCSIVolumeTestJSON-1442657779-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "33b068be4ffa4855ad04a7e75b130973", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a4e2ad6-76", "ovs_interfaceid": "8a4e2ad6-7678-4e9e-a7af-1edaf4213a4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.671111] env[61868]: DEBUG oslo_concurrency.lockutils [req-72a81f89-b487-4a1f-9b08-df648630445a req-25f7c9ac-8c5f-4163-8e73-ccc2e5933693 service nova] Releasing lock "refresh_cache-4a360bf5-a22e-4e94-9274-be83ea0f6e5f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1680.899088] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41148, 'name': CreateVM_Task, 'duration_secs': 0.290727} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.899299] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1680.899831] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1680.900148] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1680.902999] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c635e325-d428-4b3f-8412-964a1bb52205 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.935378] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Reconfiguring VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1680.935694] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afd08527-3171-4329-8d49-6ec659d0e319 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.951352] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Waiting for the task: (returnval){ [ 1680.951352] env[61868]: value = "task-41149" [ 1680.951352] env[61868]: _type = "Task" [ 1680.951352] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.959605] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Task: {'id': task-41149, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.351048] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1681.361704] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1681.361912] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1681.362210] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1681.362442] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1681.363687] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45452917-9832-429f-9962-b409e3ed9db6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.372830] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7fc22e-fce9-4eae-8f61-2a55346ed38e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.387821] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d073295-9aee-45dd-b5bb-02481782c14c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.394729] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38d52e1-37c8-4585-9af2-26e8023847a8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.425558] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181916MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1681.425736] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1681.425844] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1681.462412] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Task: {'id': task-41149, 'name': ReconfigVM_Task, 'duration_secs': 0.106495} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.467269] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Reconfigured VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1681.467488] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.567s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1681.467735] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e8f68503-5106-4c31-8330-9d2da372531b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1681.467881] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e8f68503-5106-4c31-8330-9d2da372531b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1681.468261] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e8f68503-5106-4c31-8330-9d2da372531b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1681.468975] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfd2f7e8-0c07-46f5-9fd4-cd43c26d35f7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.474080] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Waiting for the task: (returnval){ [ 1681.474080] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5240f832-959f-20c8-cae4-ab4682076397" [ 
1681.474080] env[61868]: _type = "Task" [ 1681.474080] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.486802] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5240f832-959f-20c8-cae4-ab4682076397, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.497898] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498025] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498104] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498225] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498343] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498458] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498571] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498683] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498793] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.498921] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a360bf5-a22e-4e94-9274-be83ea0f6e5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1681.537132] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.550307] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.562941] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.572822] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1681.573059] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1681.573209] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1681.754672] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2283379-ab7b-43ef-98a9-ed60cd769db0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.763129] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a15305-3246-48c2-a922-f4880a80bb3c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.793258] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ada7369-d292-4a94-8151-54934d1ec7db {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.801202] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c07854-4e58-47ab-875b-f7b3e36a9bc4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.816136] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1681.824658] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1681.842353] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1681.842553] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.417s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1681.983924] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e8f68503-5106-4c31-8330-9d2da372531b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1681.984203] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Processing image e8f68503-5106-4c31-8330-9d2da372531b {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1681.984450] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e8f68503-5106-4c31-8330-9d2da372531b/e8f68503-5106-4c31-8330-9d2da372531b.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1681.984597] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e8f68503-5106-4c31-8330-9d2da372531b/e8f68503-5106-4c31-8330-9d2da372531b.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1681.984773] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1681.985032] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f4b5779-0503-4803-a1fa-0d2d99e49710 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.002600] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1682.002813] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1682.003570] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a589eea-0cf3-4a73-ae6d-9c461ae7b894 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.009479] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Waiting for the task: (returnval){ [ 1682.009479] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52819c6e-495f-d660-a357-1b7e986b9e24" [ 1682.009479] env[61868]: _type = "Task" [ 1682.009479] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.017622] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52819c6e-495f-d660-a357-1b7e986b9e24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.521074] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1682.521325] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Creating directory with path [datastore2] vmware_temp/73df7dd6-5a5e-4b24-8521-68d2f1580c61/e8f68503-5106-4c31-8330-9d2da372531b {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1682.521580] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-954bfabc-e4c4-4985-9522-cd72f80c9e90 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.541678] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Created directory with path [datastore2] vmware_temp/73df7dd6-5a5e-4b24-8521-68d2f1580c61/e8f68503-5106-4c31-8330-9d2da372531b {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1682.541883] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Creating Virtual Disk of size 49472.0 KB and adapter type None on the data store {{(pid=61868) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1373}} [ 1682.542604] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-a43c17d7-a9f1-49e5-8837-3df853c66205 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.547190] env[61868]: WARNING suds.client [-] Web service reported a SOAP processing fault 
using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1682.547340] env[61868]: DEBUG oslo_vmware.api [-] Fault list: [InvalidRequest] {{(pid=61868) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1682.547408] env[61868]: DEBUG oslo_vmware.exceptions [-] Fault InvalidRequest not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1682.547673] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e8f68503-5106-4c31-8330-9d2da372531b/e8f68503-5106-4c31-8330-9d2da372531b.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1682.548828] env[61868]: ERROR nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: [ 1682.548828] env[61868]: Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1682.548828] env[61868]: while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1682.548828] env[61868]: at line 1, column 685 [ 1682.548828] env[61868]: while parsing call information for method CreateVirtualDisk_Task [ 1682.548828] env[61868]: at line 1, column 366 [ 1682.548828] env[61868]: while parsing SOAP body [ 1682.548828] env[61868]: at line 1, column 356 [ 1682.548828] env[61868]: while parsing SOAP envelope [ 1682.548828] env[61868]: at line 1, column 38 [ 1682.548828] env[61868]: while parsing HTTP request for method createVirtualDisk [ 1682.548828] env[61868]: on object of type vim.VirtualDiskManager [ 1682.548828] env[61868]: at line 1, column 0 [ 1682.548828] env[61868]: Cause: Server raised fault: ' [ 1682.548828] env[61868]: Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1682.548828] env[61868]: while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1682.548828] env[61868]: at line 1, column 685 [ 1682.548828] env[61868]: while parsing call information for method CreateVirtualDisk_Task [ 1682.548828] env[61868]: at line 1, column 366 [ 1682.548828] env[61868]: while parsing SOAP body [ 1682.548828] env[61868]: at line 1, column 356 [ 1682.548828] env[61868]: while parsing SOAP envelope [ 1682.548828] env[61868]: at line 1, column 38 [ 1682.548828] env[61868]: while parsing HTTP request for method createVirtualDisk [ 1682.548828] env[61868]: on object of type vim.VirtualDiskManager [ 1682.548828] env[61868]: at line 1, column 0' [ 1682.548828] env[61868]: Faults: [InvalidRequest] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Traceback (most recent call last): [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] response = request(managed_object, **kwargs) [ 1682.548828] env[61868]: ERROR 
nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__ [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return client.invoke(args, kwargs) [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] result = self.send(soapenv, timeout=timeout) [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return self.process_reply(reply.message, None, None) [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] raise WebFault(fault, replyroot) [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] suds.WebFault: Server raised fault: ' [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 685 [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing call information for method CreateVirtualDisk_Task [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 366 [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP body [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 356 [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP envelope [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 38 [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing HTTP request for method createVirtualDisk [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 
4a360bf5-a22e-4e94-9274-be83ea0f6e5f] on object of type vim.VirtualDiskManager [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 0' [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] During handling of the above exception, another exception occurred: [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Traceback (most recent call last): [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1682.548828] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] yield resources [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] self.driver.spawn(context, instance, image_meta, [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] self._fetch_image_if_missing(context, vi) [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 634, in _fetch_image_if_missing [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] tmp_dir_loc, tmp_image_ds_loc = image_prepare(vi) [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 491, in _prepare_flat_image [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] vm_util.create_virtual_disk( [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1385, in create_virtual_disk [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] vmdk_create_task = session._call_method( [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 125, in _call_method [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return self.invoke_api(module, method, *args, **kwargs) [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
358, in invoke_api [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return _invoke_api(module, method, *args, **kwargs) [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return evt.wait() [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] result = hub.switch() [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return self.greenlet.switch() [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] idle = self.f(*self.args, **self.kw) [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] result = f(*args, **kwargs) [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return api_method(*args, **kwargs) [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] raise exceptions.VimFaultException(fault_list, fault_string, [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] oslo_vmware.exceptions.VimFaultException: [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 685 [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing call information 
for method CreateVirtualDisk_Task [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 366 [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP body [ 1682.551117] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 356 [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP envelope [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 38 [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing HTTP request for method createVirtualDisk [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] on object of type vim.VirtualDiskManager [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 0 [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Cause: Server raised fault: ' [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 685 [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing call information for method CreateVirtualDisk_Task [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 366 [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP body [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 356 [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP envelope [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 38 [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing HTTP request for method createVirtualDisk [ 1682.552591] env[61868]: 
ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] on object of type vim.VirtualDiskManager [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 0' [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Faults: [InvalidRequest] [ 1682.552591] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1682.552591] env[61868]: INFO nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Terminating instance [ 1682.552591] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1682.552591] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1682.553564] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94c7558-6f52-4112-9d7b-b5b3495c43aa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.559717] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1682.559946] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c63253b0-8a47-41be-a99a-2a583be171ff {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.619908] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1682.620271] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1682.620532] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Deleting the datastore file [datastore2] 4a360bf5-a22e-4e94-9274-be83ea0f6e5f {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1682.620850] env[61868]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ef0c682-484c-405d-b5c9-beca88a06861 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.628251] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Waiting for the task: (returnval){ [ 1682.628251] env[61868]: value = "task-41151" [ 1682.628251] env[61868]: _type = "Task" [ 1682.628251] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.636989] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Task: {'id': task-41151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.842934] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1683.139400] env[61868]: DEBUG oslo_vmware.api [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Task: {'id': task-41151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072626} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.139667] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.139847] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1683.140086] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1683.140329] env[61868]: INFO nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Took 0.59 seconds to destroy the instance on the hypervisor. 
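The root cause of this teardown is visible a few records up: vm_util logged "Creating Virtual Disk of size 49472.0 KB and adapter type None", and vCenter then rejected the resulting CreateVirtualDisk_Task call because adapterType is a required property of FileBackedVirtualDiskSpec. Below is a minimal sketch of a well-formed call, written against the plain pyVmomi API rather than Nova's session wrapper; the connection objects (si, dc), the datastore path, and the 'lsiLogic'/'thin' values are illustrative assumptions, not values taken from this run:

    from pyVmomi import vim

    def create_flat_vmdk(si, dc, ds_path, capacity_kb):
        # Leaving adapterType unset reproduces the InvalidRequest fault
        # captured in the traceback above ("Required property adapterType
        # is missing from data object of type FileBackedVirtualDiskSpec").
        spec = vim.VirtualDiskManager.FileBackedVirtualDiskSpec()
        spec.adapterType = 'lsiLogic'       # required: 'busLogic', 'lsiLogic' or 'ide'
        spec.diskType = 'thin'
        spec.capacityKb = int(capacity_kb)  # 49472 KB for the image in this run
        vdm = si.content.virtualDiskManager
        return vdm.CreateVirtualDisk_Task(name=ds_path, datacenter=dc, spec=spec)

In Nova's code path the adapter type is normally derived from image metadata (e.g. the vmware_adaptertype image property); "adapter type None" suggests that property was absent and no default was substituted before the spec was serialized to SOAP.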
[ 1683.142561] env[61868]: DEBUG nova.compute.claims [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1683.142733] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1683.142947] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1683.412524] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d05524d-ab91-4788-ad07-560037e3481b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.421967] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947ac087-e0bc-4c7e-b274-63a7857960df {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.254155] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d5eee2-9350-4abc-bbe0-f760fcc1cd83 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.262736] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cf639c-51dd-44d5-895f-2e6e24238317 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.277460] env[61868]: DEBUG nova.compute.provider_tree [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1684.288750] env[61868]: DEBUG nova.scheduler.client.report [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1684.306098] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d 
tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.163s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1684.306964] env[61868]: ERROR nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: [ 1684.306964] env[61868]: Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1684.306964] env[61868]: while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1684.306964] env[61868]: at line 1, column 685 [ 1684.306964] env[61868]: while parsing call information for method CreateVirtualDisk_Task [ 1684.306964] env[61868]: at line 1, column 366 [ 1684.306964] env[61868]: while parsing SOAP body [ 1684.306964] env[61868]: at line 1, column 356 [ 1684.306964] env[61868]: while parsing SOAP envelope [ 1684.306964] env[61868]: at line 1, column 38 [ 1684.306964] env[61868]: while parsing HTTP request for method createVirtualDisk [ 1684.306964] env[61868]: on object of type vim.VirtualDiskManager [ 1684.306964] env[61868]: at line 1, column 0 [ 1684.306964] env[61868]: Cause: Server raised fault: ' [ 1684.306964] env[61868]: Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1684.306964] env[61868]: while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1684.306964] env[61868]: at line 1, column 685 [ 1684.306964] env[61868]: while parsing call information for method CreateVirtualDisk_Task [ 1684.306964] env[61868]: at line 1, column 366 [ 1684.306964] env[61868]: while parsing SOAP body [ 1684.306964] env[61868]: at line 1, column 356 [ 1684.306964] env[61868]: while parsing SOAP envelope [ 1684.306964] env[61868]: at line 1, column 38 [ 1684.306964] env[61868]: while parsing HTTP request for method createVirtualDisk [ 1684.306964] env[61868]: on object of type vim.VirtualDiskManager [ 1684.306964] env[61868]: at line 1, column 0' [ 1684.306964] env[61868]: Faults: [InvalidRequest] [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Traceback (most recent call last): [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] response = request(managed_object, **kwargs) [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__ [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return client.invoke(args, kwargs) [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 
4a360bf5-a22e-4e94-9274-be83ea0f6e5f] result = self.send(soapenv, timeout=timeout) [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return self.process_reply(reply.message, None, None) [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] raise WebFault(fault, replyroot) [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] suds.WebFault: Server raised fault: ' [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 685 [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing call information for method CreateVirtualDisk_Task [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 366 [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP body [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 356 [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP envelope [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 38 [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing HTTP request for method createVirtualDisk [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] on object of type vim.VirtualDiskManager [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 0' [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] During handling of the above exception, another exception occurred: [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 
1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Traceback (most recent call last): [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1684.306964] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] self.driver.spawn(context, instance, image_meta, [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] self._fetch_image_if_missing(context, vi) [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 634, in _fetch_image_if_missing [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] tmp_dir_loc, tmp_image_ds_loc = image_prepare(vi) [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 491, in _prepare_flat_image [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] vm_util.create_virtual_disk( [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1385, in create_virtual_disk [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] vmdk_create_task = session._call_method( [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 125, in _call_method [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return self.invoke_api(module, method, *args, **kwargs) [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return _invoke_api(module, method, *args, **kwargs) [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return evt.wait() [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] result = hub.switch() [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 
4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return self.greenlet.switch() [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] idle = self.f(*self.args, **self.kw) [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] result = f(*args, **kwargs) [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] return api_method(*args, **kwargs) [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] raise exceptions.VimFaultException(fault_list, fault_string, [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] oslo_vmware.exceptions.VimFaultException: [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 685 [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing call information for method CreateVirtualDisk_Task [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 366 [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP body [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 356 [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP envelope [ 1684.309564] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 38 [ 
1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing HTTP request for method createVirtualDisk [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] on object of type vim.VirtualDiskManager [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 0 [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Cause: Server raised fault: ' [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 685 [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing call information for method CreateVirtualDisk_Task [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 366 [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP body [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 356 [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing SOAP envelope [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 38 [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] while parsing HTTP request for method createVirtualDisk [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] on object of type vim.VirtualDiskManager [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] at line 1, column 0' [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Faults: [InvalidRequest] [ 1684.311798] env[61868]: ERROR nova.compute.manager [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] [ 1684.311798] env[61868]: DEBUG nova.compute.utils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1684.311798] env[61868]: DEBUG 
nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Build of instance 4a360bf5-a22e-4e94-9274-be83ea0f6e5f was re-scheduled: [ 1684.311798] env[61868]: Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1684.311798] env[61868]: while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1684.311798] env[61868]: at line 1, column 685 [ 1684.311798] env[61868]: while parsing call information for method CreateVirtualDisk_Task [ 1684.311798] env[61868]: at line 1, column 366 [ 1684.311798] env[61868]: while parsing SOAP body [ 1684.311798] env[61868]: at line 1, column 356 [ 1684.311798] env[61868]: while parsing SOAP envelope [ 1684.311798] env[61868]: at line 1, column 38 [ 1684.311798] env[61868]: while parsing HTTP request for method createVirtualDisk [ 1684.311798] env[61868]: on object of type vim.VirtualDiskManager [ 1684.311798] env[61868]: at line 1, column 0 [ 1684.311798] env[61868]: Cause: Server raised fault: ' [ 1684.311798] env[61868]: Required property adapterType is missing from data object of type FileBackedVirtualDiskSpec [ 1684.311798] env[61868]: while parsing serialized DataObject of type vim.VirtualDiskManager.FileBackedVirtualDiskSpec [ 1684.311798] env[61868]: at line 1, column 685 [ 1684.311798] env[61868]: while parsing call information for method CreateVirtualDisk_Task [ 1684.311798] env[61868]: at line 1, column 366 [ 1684.311798] env[61868]: while parsing SOAP body [ 1684.311798] env[61868]: at line 1, column 356 [ 1684.311798] env[61868]: while parsing SOAP envelope [ 1684.311798] env[61868]: at line 1, column 38 [ 1684.311798] env[61868]: while parsing HTTP request for method createVirtualDisk [ 1684.311798] env[61868]: on object of type vim.VirtualDiskManager [ 1684.311798] env[61868]: at line 1, column 0' [ 1684.311798] env[61868]: Faults: [InvalidRequest] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1684.311798] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1684.311798] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1684.311798] env[61868]: DEBUG nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1684.314863] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1684.589738] env[61868]: DEBUG nova.network.neutron [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.604088] env[61868]: INFO nova.compute.manager [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] [instance: 4a360bf5-a22e-4e94-9274-be83ea0f6e5f] Took 0.29 seconds to deallocate network for instance. [ 1684.711766] env[61868]: INFO nova.scheduler.client.report [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Deleted allocations for instance 4a360bf5-a22e-4e94-9274-be83ea0f6e5f [ 1684.733990] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6f8ac291-6e64-45d3-8422-b24e8b422c0d tempest-AttachSCSIVolumeTestJSON-274273257 tempest-AttachSCSIVolumeTestJSON-274273257-project-member] Lock "4a360bf5-a22e-4e94-9274-be83ea0f6e5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 147.890s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1684.752687] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1684.808124] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1684.808391] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1684.809866] env[61868]: INFO nova.compute.claims [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1685.035178] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cda28ce-a631-4ccf-8a4e-43fa1a59f594 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.043363] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3725ddcc-8e9e-4793-8320-6b272964345e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.077143] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dba782-82b8-4577-8c15-86d1ff9780fa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.085449] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf24afb-84b5-40f1-8819-9a89f079f702 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.101183] env[61868]: DEBUG nova.compute.provider_tree [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.110403] env[61868]: DEBUG nova.scheduler.client.report [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1685.128364] env[61868]: DEBUG oslo_concurrency.lockutils 
[None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1685.129204] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1685.170861] env[61868]: DEBUG nova.compute.utils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1685.172380] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1685.172549] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1685.187287] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1685.226377] env[61868]: DEBUG nova.policy [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24d6cbc0e4524f1295d7db31a331f478', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f938c85a4daf4a84b0e00619048dcf9f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1685.287545] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1685.315454] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1685.315781] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1685.315981] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1685.316235] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1685.316420] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1685.316604] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1685.316854] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1685.317053] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1685.317262] env[61868]: DEBUG 
nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1685.317463] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1685.317694] env[61868]: DEBUG nova.virt.hardware [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1685.318605] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9c96fa-4ea5-42d4-9fab-d6376df7b8af {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.328009] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cc7632-2978-4ddd-983c-a7ea549363cc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.351420] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.351702] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.351892] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1685.524077] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Successfully created port: 8778757e-fca1-410f-8a35-ca0c49b2d8f0 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1686.047002] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Successfully updated port: 8778757e-fca1-410f-8a35-ca0c49b2d8f0 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1686.060801] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "refresh_cache-c6c6e502-a2aa-4f9c-be05-bf6b50078abf" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1686.061235] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquired lock "refresh_cache-c6c6e502-a2aa-4f9c-be05-bf6b50078abf" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1686.061235] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1686.098865] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1686.474309] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Updating instance_info_cache with network_info: [{"id": "8778757e-fca1-410f-8a35-ca0c49b2d8f0", "address": "fa:16:3e:0b:92:b5", "network": {"id": "7b97be17-12d8-40ed-92f8-ea9039a80005", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2055718177-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "f938c85a4daf4a84b0e00619048dcf9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8778757e-fc", "ovs_interfaceid": "8778757e-fca1-410f-8a35-ca0c49b2d8f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.490845] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Releasing lock "refresh_cache-c6c6e502-a2aa-4f9c-be05-bf6b50078abf" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1686.491218] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Instance network_info: |[{"id": "8778757e-fca1-410f-8a35-ca0c49b2d8f0", "address": "fa:16:3e:0b:92:b5", "network": {"id": "7b97be17-12d8-40ed-92f8-ea9039a80005", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2055718177-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "f938c85a4daf4a84b0e00619048dcf9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8778757e-fc", "ovs_interfaceid": "8778757e-fca1-410f-8a35-ca0c49b2d8f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1686.491634] env[61868]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:92:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab0428e-1be7-475e-80e3-1f0aa08d4f86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8778757e-fca1-410f-8a35-ca0c49b2d8f0', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1686.498955] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Creating folder: Project (f938c85a4daf4a84b0e00619048dcf9f). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1686.499574] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd6688d2-c39a-463e-9307-27ffb4448de9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.520052] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Created folder: Project (f938c85a4daf4a84b0e00619048dcf9f) in parent group-v18181. [ 1686.520244] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Creating folder: Instances. Parent ref: group-v18296. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1686.520592] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7474e0a9-fd08-487e-885b-aed1b5a1b9eb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.530822] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Created folder: Instances in parent group-v18296. [ 1686.531159] env[61868]: DEBUG oslo.service.loopingcall [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.531369] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1686.531576] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a7fd037-f7e7-4d49-a2f7-8468bb68b4ff {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.559269] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "fea9d55d-d045-4d4e-b647-044e4729f21c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1686.559539] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1686.564270] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1686.564270] env[61868]: value = "task-41154" [ 1686.564270] env[61868]: _type = "Task" [ 1686.564270] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.575526] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41154, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.866618] env[61868]: DEBUG nova.compute.manager [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Received event network-vif-plugged-8778757e-fca1-410f-8a35-ca0c49b2d8f0 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1686.866854] env[61868]: DEBUG oslo_concurrency.lockutils [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] Acquiring lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1686.867061] env[61868]: DEBUG oslo_concurrency.lockutils [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1686.867228] env[61868]: DEBUG oslo_concurrency.lockutils [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1686.867395] env[61868]: DEBUG nova.compute.manager [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] No waiting events found dispatching network-vif-plugged-8778757e-fca1-410f-8a35-ca0c49b2d8f0 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1686.867559] env[61868]: WARNING nova.compute.manager [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Received unexpected event network-vif-plugged-8778757e-fca1-410f-8a35-ca0c49b2d8f0 for instance with vm_state building and task_state spawning. [ 1686.867716] env[61868]: DEBUG nova.compute.manager [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Received event network-changed-8778757e-fca1-410f-8a35-ca0c49b2d8f0 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1686.867868] env[61868]: DEBUG nova.compute.manager [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Refreshing instance network info cache due to event network-changed-8778757e-fca1-410f-8a35-ca0c49b2d8f0. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1686.868061] env[61868]: DEBUG oslo_concurrency.lockutils [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] Acquiring lock "refresh_cache-c6c6e502-a2aa-4f9c-be05-bf6b50078abf" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1686.868201] env[61868]: DEBUG oslo_concurrency.lockutils [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] Acquired lock "refresh_cache-c6c6e502-a2aa-4f9c-be05-bf6b50078abf" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1686.868380] env[61868]: DEBUG nova.network.neutron [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Refreshing network info cache for port 8778757e-fca1-410f-8a35-ca0c49b2d8f0 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1687.075031] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41154, 'name': CreateVM_Task, 'duration_secs': 0.295348} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.075228] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1687.076113] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1687.076470] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1687.080275] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eeea940-5498-4018-ae3e-d48aef1cae49 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.116807] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Reconfiguring VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1687.117264] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeff9da5-a31d-4d1e-a56e-43e1a7b7add7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.135353] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 
tempest-ServersNegativeTestJSON-1897410890-project-member] Waiting for the task: (returnval){ [ 1687.135353] env[61868]: value = "task-41155" [ 1687.135353] env[61868]: _type = "Task" [ 1687.135353] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.151525] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Task: {'id': task-41155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.285732] env[61868]: DEBUG nova.network.neutron [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Updated VIF entry in instance network info cache for port 8778757e-fca1-410f-8a35-ca0c49b2d8f0. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1687.286124] env[61868]: DEBUG nova.network.neutron [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Updating instance_info_cache with network_info: [{"id": "8778757e-fca1-410f-8a35-ca0c49b2d8f0", "address": "fa:16:3e:0b:92:b5", "network": {"id": "7b97be17-12d8-40ed-92f8-ea9039a80005", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2055718177-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "f938c85a4daf4a84b0e00619048dcf9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8778757e-fc", "ovs_interfaceid": "8778757e-fca1-410f-8a35-ca0c49b2d8f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.295842] env[61868]: DEBUG oslo_concurrency.lockutils [req-1adcd760-54ee-41a2-8187-bd12a81fa9ed req-779f9c6f-0b4d-4d47-b4a5-21d15c52576d service nova] Releasing lock "refresh_cache-c6c6e502-a2aa-4f9c-be05-bf6b50078abf" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1687.645973] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Task: {'id': task-41155, 'name': ReconfigVM_Task, 'duration_secs': 0.109914} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.646677] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Reconfigured VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1687.647055] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.571s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1687.647463] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1687.647780] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1687.648244] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1687.648666] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ddee24e-6b39-4dc4-ac23-0bc1fee634f5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.654841] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Waiting for the task: (returnval){ [ 1687.654841] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5200e191-efed-9276-accf-278c4f43e76a" [ 1687.654841] env[61868]: _type = "Task" [ 1687.654841] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.667411] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5200e191-efed-9276-accf-278c4f43e76a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.168550] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1688.169085] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1688.169383] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1725.102234] env[61868]: WARNING oslo_vmware.rw_handles [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1725.102234] env[61868]: ERROR oslo_vmware.rw_handles [ 1725.103354] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1725.110687] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 
tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1725.110952] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Copying Virtual Disk [datastore2] vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/27bc87b5-078a-49a6-a10e-7e27fa75aa77/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1725.112293] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c95bfee-3969-434d-9a0f-4766be73af45 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.122989] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Waiting for the task: (returnval){ [ 1725.122989] env[61868]: value = "task-41156" [ 1725.122989] env[61868]: _type = "Task" [ 1725.122989] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.131939] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Task: {'id': task-41156, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.633173] env[61868]: DEBUG oslo_vmware.exceptions [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1725.661708] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1725.661708] env[61868]: ERROR nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1725.661708] env[61868]: Faults: ['InvalidArgument'] [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Traceback (most recent call last): [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] yield resources [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self.driver.spawn(context, instance, image_meta, [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self._fetch_image_if_missing(context, vi) [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] image_cache(vi, tmp_image_ds_loc) [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] vm_util.copy_virtual_disk( [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] session._wait_for_task(vmdk_copy_task) [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] return self.wait_for_task(task_ref) [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] return evt.wait() [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] result = hub.switch() [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] return self.greenlet.switch() [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self.f(*self.args, **self.kw) [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] raise exceptions.translate_fault(task_info.error) [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Faults: ['InvalidArgument'] [ 1725.661708] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] [ 1725.661708] env[61868]: INFO nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Terminating instance [ 1725.663579] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1725.663579] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1725.663579] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8be9243-7fcc-4b63-8ea4-e8adfb9390b8 {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.663579] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1725.663579] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1725.663579] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d47408-4f92-464a-8ea0-1759c9568be3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.663579] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1725.663579] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0b87e78-beb3-4a9a-bd4d-38c2c8c50d1a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.663579] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1725.663579] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1725.663579] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4771f1e9-0df5-4412-9ae0-2ac9e86e5543 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.663579] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for the task: (returnval){ [ 1725.663579] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]527e1827-cae9-2f92-3139-7805975411a9" [ 1725.663579] env[61868]: _type = "Task" [ 1725.663579] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.664377] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]527e1827-cae9-2f92-3139-7805975411a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.719655] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1725.719896] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1725.720081] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Deleting the datastore file [datastore2] eafa3522-51e3-4582-b060-3e3ac4224ae2 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1725.720366] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c1c0f23-7819-4a0d-9840-b63abd0e2365 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.727154] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Waiting for the task: (returnval){ [ 1725.727154] env[61868]: value = "task-41158" [ 1725.727154] env[61868]: _type = "Task" [ 1725.727154] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.735061] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Task: {'id': task-41158, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.175632] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1726.175987] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Creating directory with path [datastore2] vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1726.176200] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cb786aa-cef1-4188-b9f6-4a353e14d62b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.190094] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Created directory with path [datastore2] vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1726.190334] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Fetch image to [datastore2] vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1726.190505] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1726.191596] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76660e7-5ceb-4d0c-9f19-efd5cd3828e4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.199838] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabe26ab-45c7-4107-8d20-9a4faae5688c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.211313] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03559ec-38e9-4c0f-929f-ad3725e7fcf9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.249604] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6ac50f-d795-458f-a82c-4428ce669c78 {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.260073] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c1a0aa90-8c5f-4a68-bb78-dea1ca96a68c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.262294] env[61868]: DEBUG oslo_vmware.api [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Task: {'id': task-41158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083493} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.262598] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1726.262781] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1726.262968] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1726.263255] env[61868]: INFO nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Took 0.62 seconds to destroy the instance on the hypervisor. 
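The "Waiting for the task ... progress is 0% ... completed successfully" lines above come from the driver polling a vCenter task object until it reaches a terminal state. The following is a minimal sketch of that polling pattern only, not the actual oslo.vmware implementation; `get_task_info`, the state strings, and `TaskFailed` are illustrative stand-ins for the real suds task objects and `oslo_vmware.exceptions.VimFaultException`.

```python
# Sketch of the task-polling loop behind the log lines above (illustrative,
# not the oslo.vmware source). Each iteration corresponds to one
# PropertyCollector round trip against the vCenter task.
import time

POLL_INTERVAL = 0.5  # seconds; the real interval is configurable


class TaskFailed(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""


def wait_for_task(get_task_info, task_id):
    """Poll a vCenter-style task until it succeeds or errors."""
    while True:
        info = get_task_info(task_id)
        if info["state"] == "running":
            print(f"Task: {task_id} progress is {info['progress']}%.")
        elif info["state"] == "success":
            print(f"Task: {task_id} completed successfully.")
            return info.get("result")
        elif info["state"] == "error":
            # Mirrors "raise exceptions.translate_fault(task_info.error)"
            # in the tracebacks above: a task error becomes an exception
            # in the greenthread that was waiting on the task.
            raise TaskFailed(info["error"])
        time.sleep(POLL_INTERVAL)


# Simulated run: one "running" poll, then success, like task-41158 above.
states = iter([
    {"state": "running", "progress": 0},
    {"state": "success", "progress": 100, "result": None},
])
wait_for_task(lambda _tid: next(states), "task-41158")
```

This is why the InvalidArgument fault from CopyVirtualDisk_Task surfaces inside `_wait_for_task` in the traceback: the fault is recorded on the task and only re-raised client-side when the poll observes the error state.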
[ 1726.265850] env[61868]: DEBUG nova.compute.claims [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1726.266103] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1726.266285] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1726.293262] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1726.350810] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1726.352156] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.406661] env[61868]: DEBUG nova.scheduler.client.report [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Refreshing inventories for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1726.411761] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Completed reading data from the image iterator. 
{{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1726.411914] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1726.422727] env[61868]: DEBUG nova.scheduler.client.report [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Updating ProviderTree inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1726.422993] env[61868]: DEBUG nova.compute.provider_tree [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1726.437813] env[61868]: DEBUG nova.scheduler.client.report [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Refreshing aggregate associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, aggregates: None {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1726.457996] env[61868]: DEBUG nova.scheduler.client.report [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Refreshing trait associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1726.663913] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a5c2b4-99d2-4342-8ea4-426d77ecf550 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.672269] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa90b91d-c6a3-42f3-9f9d-3a6109846df2 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.705325] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407db757-807b-4b80-8d93-9daa131cea33 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.713752] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63f81a2-83bd-42fa-bff0-d07131552148 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.727956] env[61868]: DEBUG nova.compute.provider_tree [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1726.738272] env[61868]: DEBUG nova.scheduler.client.report [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1726.757802] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.491s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1726.758385] env[61868]: ERROR nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1726.758385] env[61868]: Faults: ['InvalidArgument'] [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Traceback (most recent call last): [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self.driver.spawn(context, instance, image_meta, [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self._fetch_image_if_missing(context, vi) [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] image_cache(vi, tmp_image_ds_loc) [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] vm_util.copy_virtual_disk( [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] session._wait_for_task(vmdk_copy_task) [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] return self.wait_for_task(task_ref) [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] return evt.wait() [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] result = hub.switch() [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] return self.greenlet.switch() [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] self.f(*self.args, **self.kw) [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] raise exceptions.translate_fault(task_info.error) [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1726.758385] env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Faults: ['InvalidArgument'] [ 1726.758385] 
env[61868]: ERROR nova.compute.manager [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] [ 1726.759327] env[61868]: DEBUG nova.compute.utils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1726.761251] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Build of instance eafa3522-51e3-4582-b060-3e3ac4224ae2 was re-scheduled: A specified parameter was not correct: fileType [ 1726.761251] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1726.761589] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1726.761765] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1726.761962] env[61868]: DEBUG nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1726.762273] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1727.160913] env[61868]: DEBUG nova.network.neutron [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.176292] env[61868]: INFO nova.compute.manager [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Took 0.41 seconds to deallocate network for instance. 
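The inventory dicts that the report client logs repeatedly above encode usable capacity per resource class as (total - reserved) * allocation_ratio, which is the formula Placement applies when checking allocations. A worked example using the exact values from this log (the dict literal is copied from the entries above, minus the unused step_size key in the printout):

```python
# Capacity implied by the provider inventory logged above:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, max {inv['max_unit']} per instance")

# VCPU: capacity=192, max 16 per instance
# MEMORY_MB: capacity=196078, max 65530 per instance
# DISK_GB: capacity=400, max 197 per instance
```

So the host oversubscribes CPU 4x (192 schedulable VCPUs on 48 physical) but not memory or disk, which is consistent with the "Inventory has not changed" comparisons succeeding each time the provider tree is refreshed.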
[ 1727.291529] env[61868]: INFO nova.scheduler.client.report [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Deleted allocations for instance eafa3522-51e3-4582-b060-3e3ac4224ae2 [ 1727.312888] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a6d3a571-b087-40f8-ad62-d645b94bd18c tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.663s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1727.314134] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.037s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1727.314356] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Acquiring lock "eafa3522-51e3-4582-b060-3e3ac4224ae2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1727.314566] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1727.314736] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1727.316870] env[61868]: INFO nova.compute.manager [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Terminating instance [ 1727.318564] env[61868]: DEBUG nova.compute.manager [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1727.318758] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1727.319337] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b1efb29-1dc6-48d5-92c7-15b010d4cdc1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.328795] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdac031-b586-4687-b6c9-25975e5048d4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.340056] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1727.363209] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eafa3522-51e3-4582-b060-3e3ac4224ae2 could not be found. [ 1727.363438] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1727.363623] env[61868]: INFO nova.compute.manager [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1727.363872] env[61868]: DEBUG oslo.service.loopingcall [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.364275] env[61868]: DEBUG nova.compute.manager [-] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1727.364378] env[61868]: DEBUG nova.network.neutron [-] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1727.394241] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1727.394493] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1727.396087] env[61868]: INFO nova.compute.claims [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1727.398962] env[61868]: DEBUG nova.network.neutron [-] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.406716] env[61868]: INFO nova.compute.manager [-] [instance: eafa3522-51e3-4582-b060-3e3ac4224ae2] Took 0.04 seconds to deallocate network for instance. 
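The terminate path above is idempotent: the earlier abort already removed the VM from the backend, so when `do_terminate_instance` finally acquires the instance lock (after waiting 431 seconds behind the build), the driver gets InstanceNotFound, logs the "Instance does not exist on backend" warning, treats the destroy as complete, and still runs network deallocation. A minimal sketch of that pattern, with hypothetical names rather than Nova's actual API:

```python
# Illustrative sketch of the idempotent-destroy pattern in the log above.
# `backend`, `find_vm`, and `deallocate_network` are hypothetical stand-ins.
class InstanceNotFound(Exception):
    pass


def destroy_instance(backend, instance_uuid):
    try:
        vm_ref = backend.find_vm(instance_uuid)
        backend.unregister(vm_ref)
        backend.delete_datastore_files(instance_uuid)
    except InstanceNotFound:
        # "Instance does not exist on backend": nothing left to delete,
        # but the end state is the same, so fall through to cleanup.
        print(f"WARNING: instance {instance_uuid} not found on backend")
    # Network cleanup runs either way; in Nova it is retried via a
    # looping call, as the "Waiting for function ...
    # _deallocate_network_with_retries" entry above shows.
    deallocate_network(instance_uuid)


def deallocate_network(instance_uuid):
    print(f"Deallocating network for instance {instance_uuid}")


class GoneBackend:
    def find_vm(self, uuid):
        raise InstanceNotFound


destroy_instance(GoneBackend(), "eafa3522-51e3-4582-b060-3e3ac4224ae2")
```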
[ 1727.522881] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1960ff2e-d634-4700-81a6-be7eff7edad6 tempest-TaggedAttachmentsTest-1373114596 tempest-TaggedAttachmentsTest-1373114596-project-member] Lock "eafa3522-51e3-4582-b060-3e3ac4224ae2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1727.632354] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68d671c-dee0-4195-9590-79ede43255cd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.641085] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b65d3d-9da3-44cc-a6f5-c1a6da17745f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.673645] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0721a33e-7159-4aa0-8f56-8504a170def1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.681866] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772d0a5b-4d1f-4906-b5a2-f9f00ccaa538 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.696545] env[61868]: DEBUG nova.compute.provider_tree [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1727.705592] env[61868]: DEBUG nova.scheduler.client.report [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1727.720807] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1727.721373] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1727.755147] env[61868]: DEBUG nova.compute.utils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1727.756407] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1727.756578] env[61868]: DEBUG nova.network.neutron [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1727.770448] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1727.803267] env[61868]: DEBUG nova.policy [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91b3a84ec5c48d896a5bf3d8c568343', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9eabc0f9c1604e90b373219843edfc8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1727.844186] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1727.867847] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1727.868094] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1727.868427] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1727.868654] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1727.868800] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1727.868942] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1727.869546] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1727.869715] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1727.869884] env[61868]: DEBUG 
nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1727.870042] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1727.870222] env[61868]: DEBUG nova.virt.hardware [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1727.871117] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182e0d57-1399-4329-9070-6f03e59abcea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.880315] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446bb999-9ba1-4aa9-9059-5c0bde51711b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.066950] env[61868]: DEBUG nova.network.neutron [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Successfully created port: 06b5332a-e1df-4ef6-80a4-0f91c41e7c91 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1728.623578] env[61868]: DEBUG nova.network.neutron [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Successfully updated port: 06b5332a-e1df-4ef6-80a4-0f91c41e7c91 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1728.635498] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "refresh_cache-36be944d-04bc-45cd-8019-173437f8ffa5" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1728.635684] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "refresh_cache-36be944d-04bc-45cd-8019-173437f8ffa5" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1728.635920] env[61868]: DEBUG nova.network.neutron [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1728.686465] env[61868]: DEBUG nova.network.neutron [None 
req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1728.841771] env[61868]: DEBUG nova.network.neutron [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Updating instance_info_cache with network_info: [{"id": "06b5332a-e1df-4ef6-80a4-0f91c41e7c91", "address": "fa:16:3e:ef:5c:1d", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06b5332a-e1", "ovs_interfaceid": "06b5332a-e1df-4ef6-80a4-0f91c41e7c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.856042] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "refresh_cache-36be944d-04bc-45cd-8019-173437f8ffa5" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1728.856444] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Instance network_info: |[{"id": "06b5332a-e1df-4ef6-80a4-0f91c41e7c91", "address": "fa:16:3e:ef:5c:1d", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06b5332a-e1", "ovs_interfaceid": "06b5332a-e1df-4ef6-80a4-0f91c41e7c91", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1728.856892] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:5c:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06b5332a-e1df-4ef6-80a4-0f91c41e7c91', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1728.864517] env[61868]: DEBUG oslo.service.loopingcall [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.865052] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1728.865284] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47b6fcaf-5274-433f-b845-6d16f9c71eaf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.886051] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1728.886051] env[61868]: value = "task-41159" [ 1728.886051] env[61868]: _type = "Task" [ 1728.886051] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.894705] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41159, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.198765] env[61868]: DEBUG nova.compute.manager [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Received event network-vif-plugged-06b5332a-e1df-4ef6-80a4-0f91c41e7c91 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1729.198765] env[61868]: DEBUG oslo_concurrency.lockutils [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] Acquiring lock "36be944d-04bc-45cd-8019-173437f8ffa5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1729.199128] env[61868]: DEBUG oslo_concurrency.lockutils [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] Lock "36be944d-04bc-45cd-8019-173437f8ffa5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1729.199128] env[61868]: DEBUG oslo_concurrency.lockutils [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] Lock "36be944d-04bc-45cd-8019-173437f8ffa5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1729.199364] env[61868]: DEBUG nova.compute.manager [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] No waiting events found dispatching network-vif-plugged-06b5332a-e1df-4ef6-80a4-0f91c41e7c91 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1729.199364] env[61868]: WARNING nova.compute.manager [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Received unexpected event network-vif-plugged-06b5332a-e1df-4ef6-80a4-0f91c41e7c91 for instance with vm_state building and task_state spawning. [ 1729.199534] env[61868]: DEBUG nova.compute.manager [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Received event network-changed-06b5332a-e1df-4ef6-80a4-0f91c41e7c91 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1729.199645] env[61868]: DEBUG nova.compute.manager [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Refreshing instance network info cache due to event network-changed-06b5332a-e1df-4ef6-80a4-0f91c41e7c91. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1729.199828] env[61868]: DEBUG oslo_concurrency.lockutils [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] Acquiring lock "refresh_cache-36be944d-04bc-45cd-8019-173437f8ffa5" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1729.199959] env[61868]: DEBUG oslo_concurrency.lockutils [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] Acquired lock "refresh_cache-36be944d-04bc-45cd-8019-173437f8ffa5" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1729.200150] env[61868]: DEBUG nova.network.neutron [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Refreshing network info cache for port 06b5332a-e1df-4ef6-80a4-0f91c41e7c91 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1729.396242] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41159, 'name': CreateVM_Task, 'duration_secs': 0.460313} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.396427] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1729.397001] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1729.397239] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1729.400044] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa15485e-41d2-4e39-a150-137ffb694434 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.435067] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Reconfiguring VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1729.435401] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-064f9f11-5181-4027-a4f0-af8aec460602 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.449539] env[61868]: DEBUG nova.network.neutron [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 
36be944d-04bc-45cd-8019-173437f8ffa5] Updated VIF entry in instance network info cache for port 06b5332a-e1df-4ef6-80a4-0f91c41e7c91. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1729.449885] env[61868]: DEBUG nova.network.neutron [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Updating instance_info_cache with network_info: [{"id": "06b5332a-e1df-4ef6-80a4-0f91c41e7c91", "address": "fa:16:3e:ef:5c:1d", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06b5332a-e1", "ovs_interfaceid": "06b5332a-e1df-4ef6-80a4-0f91c41e7c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1729.452871] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1729.452871] env[61868]: value = "task-41160" [ 1729.452871] env[61868]: _type = "Task" [ 1729.452871] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.459576] env[61868]: DEBUG oslo_concurrency.lockutils [req-f7d31e48-29eb-429a-a6a8-4db8c44ae0b6 req-ec093e6e-8068-42dd-8a0e-f61f234f948c service nova] Releasing lock "refresh_cache-36be944d-04bc-45cd-8019-173437f8ffa5" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1729.463395] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41160, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.963075] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41160, 'name': ReconfigVM_Task, 'duration_secs': 0.107764} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.963521] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Reconfigured VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1729.963632] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.566s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1729.963875] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1729.964025] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1729.964363] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1729.964609] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5668a3e8-7090-4714-bba3-eca8d93877e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.969159] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 1729.969159] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5238bdfa-9d5c-fa36-24fa-fe1bc02a9021" [ 1729.969159] env[61868]: _type = "Task" [ 1729.969159] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.978136] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5238bdfa-9d5c-fa36-24fa-fe1bc02a9021, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.408682] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1730.482550] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1730.482550] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1730.482550] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1735.352662] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.353016] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1735.353016] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1735.373314] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.373498] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.373721] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.373721] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.373827] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.373939] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.374057] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.374175] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.374292] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.374400] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1735.374513] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1735.375037] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.375170] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1735.387836] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] There are 0 instances to clean {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1739.351068] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.351406] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.373330] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.373330] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61868) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1741.360662] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.361003] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.361109] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.371093] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1741.371286] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1741.371455] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1741.371610] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1741.372728] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787ff6ff-5e75-4d41-9e3e-b24fa39a5adb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.381810] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866d7157-d4d0-4c94-9f35-91586eb42be5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.395788] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc879d6e-a55c-4b81-9277-ae500e93aba4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.402401] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7dd542-37b8-49ab-83c3-2ae6c0752a90 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.432547] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181908MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1741.432741] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1741.432895] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1741.495560] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c9f74904-0558-42e6-a454-c7103b2873b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.495719] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.495845] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.495968] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.496104] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.496224] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.496346] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.496462] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.496641] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.496798] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.507506] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.517837] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.527298] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.527524] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1741.527677] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1741.680446] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a84090-7fe4-48c6-9e27-d08e5a5f8d4e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.688348] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20abdf10-4c1e-425f-a3d2-8d4a9270e1fc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.718947] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e631195-308e-4cc4-bbb2-8e2cc1eae68c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.727187] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a774e19c-ba72-4720-be84-43dd66d77b52 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.741756] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1741.753284] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1741.771817] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1741.772054] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.339s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1744.762953] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1745.351596] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1747.351361] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1747.351737] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1759.612541] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1759.633613] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Getting list of instances from cluster (obj){ [ 1759.633613] env[61868]: value = "domain-c8" [ 1759.633613] env[61868]: _type = "ClusterComputeResource" [ 1759.633613] env[61868]: } {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1759.634922] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7569771f-2052-45bb-9e9a-41a7344336ec {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.652285] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Got total of 10 instances {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1759.652455] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid c9f74904-0558-42e6-a454-c7103b2873b1 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.652650] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid efb3f108-d3b3-4ebf-a51f-84dc8274f857 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.652808] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid ebd4070e-7944-4d2f-8668-01d0ceca0c67 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.652960] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid d6f7828e-6617-40ca-9f6c-e3a72c328dc9 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.653112] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.653262] env[61868]: 
DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid d69cb3f5-b385-432a-b562-87d0b1b0877b {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.653467] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 97840d8b-90ee-432e-988a-30548b61381b {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.653578] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid b1473dd0-5427-496c-a94c-5772635b229f {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.653714] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid c6c6e502-a2aa-4f9c-be05-bf6b50078abf {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.653856] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 36be944d-04bc-45cd-8019-173437f8ffa5 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 1759.654184] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "c9f74904-0558-42e6-a454-c7103b2873b1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.654428] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.654649] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.654848] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.655041] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.655232] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.655420] env[61868]: 
DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "97840d8b-90ee-432e-988a-30548b61381b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.655606] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "b1473dd0-5427-496c-a94c-5772635b229f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.655792] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1759.655979] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "36be944d-04bc-45cd-8019-173437f8ffa5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1775.806020] env[61868]: WARNING oslo_vmware.rw_handles [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1775.806020] env[61868]: ERROR oslo_vmware.rw_handles [ 1775.806670] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1775.808253] 
env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1775.808499] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Copying Virtual Disk [datastore2] vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/a5a66a84-fb35-4b4c-a870-c18a2ca822e0/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1775.808778] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eaa84adc-92ac-4606-ad9b-5baaebbb7a49 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.817399] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for the task: (returnval){ [ 1775.817399] env[61868]: value = "task-41161" [ 1775.817399] env[61868]: _type = "Task" [ 1775.817399] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.826289] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Task: {'id': task-41161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.329036] env[61868]: DEBUG oslo_vmware.exceptions [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1776.329345] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1776.329922] env[61868]: ERROR nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1776.329922] env[61868]: Faults: ['InvalidArgument'] [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] yield resources [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.driver.spawn(context, instance, image_meta, [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._fetch_image_if_missing(context, vi) [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] image_cache(vi, tmp_image_ds_loc) [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] vm_util.copy_virtual_disk( [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] session._wait_for_task(vmdk_copy_task) [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.wait_for_task(task_ref) [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return evt.wait() [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] result = hub.switch() [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.greenlet.switch() [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.f(*self.args, **self.kw) [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise exceptions.translate_fault(task_info.error) [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Faults: ['InvalidArgument'] [ 1776.329922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1776.330849] env[61868]: INFO nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Terminating instance [ 1776.332442] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1776.332666] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1776.333228] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock 
"refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1776.333366] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquired lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1776.333535] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1776.334525] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33fe27c3-c14f-49ff-8bee-f4504b07bcf7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.345831] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1776.346110] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1776.347081] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-038ce3a0-6f5c-42f0-9ee8-da038cc25d7b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.353470] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Waiting for the task: (returnval){ [ 1776.353470] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52ea0d37-de5a-7475-1c4d-9ef050b899bb" [ 1776.353470] env[61868]: _type = "Task" [ 1776.353470] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.363773] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52ea0d37-de5a-7475-1c4d-9ef050b899bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.369359] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1776.403043] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.412784] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Releasing lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1776.413288] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1776.413587] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1776.414702] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b18e5e7-f31e-4465-80c2-c1cf85d978f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.422869] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1776.423165] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b2bc31e-379e-47dc-aa5c-5eedf48c047a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.450488] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1776.450795] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1776.451044] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Deleting the datastore file [datastore2] c9f74904-0558-42e6-a454-c7103b2873b1 {{(pid=61868) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1776.451357] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-231afd7a-dfd7-43fb-928e-2e405adbf69e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.458890] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for the task: (returnval){ [ 1776.458890] env[61868]: value = "task-41163" [ 1776.458890] env[61868]: _type = "Task" [ 1776.458890] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.466811] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Task: {'id': task-41163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.864662] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1776.864993] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Creating directory with path [datastore2] vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1776.865393] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b59ec52-40ff-414b-8689-8f4701b0d4b2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.877573] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Created directory with path [datastore2] vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1776.877827] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Fetch image to [datastore2] vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1776.878047] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1776.878851] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ef9fb5-55cb-4118-8248-6ea44049dc8f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.886131] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037e8713-9e25-4588-95ca-1c094552b818 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.895193] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de168f3-659d-44f6-b940-dd09fd1b50a9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.925488] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcff690b-e129-414c-a194-53c618283369 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.932124] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9bc798fc-76ae-4b0c-bb33-855e8a6d7a81 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.952087] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1776.968178] env[61868]: DEBUG oslo_vmware.api [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Task: {'id': task-41163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036631} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.968438] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1776.968623] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1776.968804] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1776.969220] env[61868]: INFO nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1776.969506] env[61868]: DEBUG oslo.service.loopingcall [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1776.971433] env[61868]: DEBUG nova.compute.manager [-] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1776.971527] env[61868]: DEBUG nova.network.neutron [-] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1777.011797] env[61868]: DEBUG oslo_vmware.rw_handles [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1777.069343] env[61868]: DEBUG oslo_vmware.rw_handles [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Completed reading data from the image iterator.
{{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1777.069504] env[61868]: DEBUG oslo_vmware.rw_handles [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1777.107716] env[61868]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61868) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1777.107984] env[61868]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-17830fad-c930-4361-b8cc-f06846bbb4b8'] [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [
1777.108569] env[61868]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1777.108569] env[61868]: 
ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.108569] env[61868]: ERROR oslo.service.loopingcall [ 1777.110034] env[61868]: ERROR nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.154486] env[61868]: WARNING nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Could not clean up failed build, not rescheduling. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.154763] env[61868]: DEBUG nova.compute.claims [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1777.154931] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1777.155153] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1777.355184] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fe2346-2ddb-4ad6-9224-82bc28fb136c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.362952] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9590033d-d995-4b64-80f7-25ccb7ad1e37 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.395315] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9122b72e-e6f4-4e6b-a1b8-14fa0dc39ef4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.403332] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fee3280-0282-48b0-a63e-5490483a5e42 {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.416992] env[61868]: DEBUG nova.compute.provider_tree [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1777.425504] env[61868]: DEBUG nova.scheduler.client.report [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1777.442438] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.287s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1777.442679] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Build of instance c9f74904-0558-42e6-a454-c7103b2873b1 aborted: A specified parameter was not correct: fileType [ 1777.442679] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2661}} [ 1777.443397] env[61868]: DEBUG nova.compute.utils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Build of instance c9f74904-0558-42e6-a454-c7103b2873b1 aborted: A specified parameter was not correct: fileType [ 1777.443397] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1777.445126] env[61868]: ERROR nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Build of instance c9f74904-0558-42e6-a454-c7103b2873b1 aborted: A specified parameter was not correct: fileType [ 1777.445126] env[61868]: Faults: ['InvalidArgument']: nova.exception.BuildAbortException: Build of instance c9f74904-0558-42e6-a454-c7103b2873b1 aborted: A specified parameter was not correct: fileType [ 1777.445323] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks 
/opt/stack/nova/nova/compute/manager.py:2997}} [ 1777.445567] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1777.445805] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquired lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1777.445997] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1777.472529] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1777.496512] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.504661] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Releasing lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1777.505032] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1777.505106] env[61868]: DEBUG nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1777.505213] env[61868]: DEBUG nova.network.neutron [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1777.587348] env[61868]: DEBUG neutronclient.v2_0.client [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61868) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1777.588537] env[61868]: ERROR nova.compute.manager [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] exception_handler_v20(status_code, error_body) [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise client_exc(message=error_message, [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Neutron server returns request_ids: ['req-17830fad-c930-4361-b8cc-f06846bbb4b8'] [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] During handling of the above exception, another exception occurred: [ 1777.588537] env[61868]: ERROR nova.compute.manager 
[instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2902, in _build_resources [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._shutdown_instance(context, instance, [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._try_deallocate_network(context, instance, requested_networks) [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] with excutils.save_and_reraise_exception(): [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.force_reraise() [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise self.value [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] _deallocate_network_with_retries() [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return evt.wait() [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] result = hub.switch() [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.greenlet.switch() [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] result = func(*self.args, **self.kw) [ 1777.588537] 
env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] result = f(*args, **kwargs) [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1777.588537] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._deallocate_network( [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.network_api.deallocate_for_instance( [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] data = neutron.list_ports(**search_opts) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.list('ports', self.ports_path, retrieve_all, [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] for r in self._pagination(collection, path, **params): [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] res = self.get(path, params=params) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1777.590523] env[61868]: ERROR 
nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.retry_request("GET", action, body=body, [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.do_request(method, action, body=body, [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._handle_fault_response(status_code, replybody, resp) [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] During handling of the above exception, another exception occurred: [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._build_and_run_instance(context, instance, image, [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2660, in _build_and_run_instance [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] with excutils.save_and_reraise_exception(): [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1777.590523] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.force_reraise() [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise self.value [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] with self._build_resources(context, instance, [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.gen.throw(typ, value, traceback) [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2910, in _build_resources [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise exception.BuildAbortException( [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] nova.exception.BuildAbortException: Build of instance c9f74904-0558-42e6-a454-c7103b2873b1 aborted: A specified parameter was not correct: fileType [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Faults: ['InvalidArgument'] [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] During handling of the above exception, another exception occurred: [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: 
c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] exception_handler_v20(status_code, error_body) [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise client_exc(message=error_message, [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Neutron server returns request_ids: ['req-f3a66150-f534-4466-a51e-b3cd8980d387'] [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] During handling of the above exception, another exception occurred: [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._deallocate_network(context, instance, requested_networks) [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.network_api.deallocate_for_instance( [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] data = neutron.list_ports(**search_opts) [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.592922] env[61868]: ERROR 
nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.list('ports', self.ports_path, retrieve_all, [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1777.592922] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] for r in self._pagination(collection, path, **params): [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] res = self.get(path, params=params) [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.retry_request("GET", action, body=body, [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.do_request(method, action, body=body, [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._handle_fault_response(status_code, replybody, resp) [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 
1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise exception.Unauthorized() [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] nova.exception.Unauthorized: Not authorized. [ 1777.594837] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.672736] env[61868]: INFO nova.scheduler.client.report [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Deleted allocations for instance c9f74904-0558-42e6-a454-c7103b2873b1 [ 1777.673029] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d81b75fe-9a4d-43de-8992-411e06116704 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "c9f74904-0558-42e6-a454-c7103b2873b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 643.677s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1777.674189] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "c9f74904-0558-42e6-a454-c7103b2873b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 447.763s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1777.674412] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "c9f74904-0558-42e6-a454-c7103b2873b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1777.674614] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "c9f74904-0558-42e6-a454-c7103b2873b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1777.674788] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "c9f74904-0558-42e6-a454-c7103b2873b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1777.676547] env[61868]: INFO nova.compute.manager [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Terminating instance [ 1777.678174] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquiring lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1777.678341] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Acquired lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1777.678514] env[61868]: DEBUG nova.network.neutron [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1777.687158] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1777.710788] env[61868]: DEBUG nova.network.neutron [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1777.737867] env[61868]: DEBUG nova.network.neutron [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.743927] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1777.744290] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1777.745757] env[61868]: INFO nova.compute.claims [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1777.748889] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Releasing lock "refresh_cache-c9f74904-0558-42e6-a454-c7103b2873b1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1777.749294] env[61868]: DEBUG nova.compute.manager [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 
tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1777.749494] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1777.749967] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0abd302e-bf3b-491f-97f2-32e3ef3bc099 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.760630] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eaaaa39-e3be-44f6-b88a-f81402588379 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.792773] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9f74904-0558-42e6-a454-c7103b2873b1 could not be found. [ 1777.792981] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1777.793164] env[61868]: INFO nova.compute.manager [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1777.793411] env[61868]: DEBUG oslo.service.loopingcall [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1777.796328] env[61868]: DEBUG nova.compute.manager [-] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1777.796439] env[61868]: DEBUG nova.network.neutron [-] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1777.883408] env[61868]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61868) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1777.883674] env[61868]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-0acf17fb-a797-4a45-905d-6b57b1ce9bf1'] [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.884263] env[61868]: ERROR oslo.service.loopingcall [ 1777.885536] env[61868]: ERROR nova.compute.manager [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.920436] env[61868]: ERROR nova.compute.manager [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] exception_handler_v20(status_code, error_body) [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise client_exc(message=error_message, [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Neutron server returns request_ids: ['req-0acf17fb-a797-4a45-905d-6b57b1ce9bf1'] [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] During handling of the above exception, another exception occurred: [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Traceback (most recent call last): [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] 
File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._delete_instance(context, instance, bdms) [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._shutdown_instance(context, instance, bdms) [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._try_deallocate_network(context, instance, requested_networks) [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] with excutils.save_and_reraise_exception(): [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.force_reraise() [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise self.value [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] _deallocate_network_with_retries() [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return evt.wait() [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] result = hub.switch() [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.greenlet.switch() [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] result 
= func(*self.args, **self.kw) [ 1777.920436] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] result = f(*args, **kwargs) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._deallocate_network( [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self.network_api.deallocate_for_instance( [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] data = neutron.list_ports(**search_opts) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.list('ports', self.ports_path, retrieve_all, [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] for r in self._pagination(collection, path, **params): [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] res = self.get(path, params=params) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.retry_request("GET", action, body=body, [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] return self.do_request(method, action, body=body, [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] ret = obj(*args, **kwargs) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] self._handle_fault_response(status_code, replybody, resp) [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1777.921525] env[61868]: ERROR nova.compute.manager [instance: c9f74904-0558-42e6-a454-c7103b2873b1] [ 1777.954751] env[61868]: DEBUG oslo_concurrency.lockutils [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Lock "c9f74904-0558-42e6-a454-c7103b2873b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.280s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1777.956386] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "c9f74904-0558-42e6-a454-c7103b2873b1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.302s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1777.956654] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] During sync_power_state the instance has a pending task (deleting). Skip. 
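All of the tracebacks above funnel through the same choke point: every neutronclient call is proxied by a wrapper (nova/network/neutron.py line 196 in this tree), and an HTTP 401 from Neutron is translated into a Nova-level exception at line 212. What follows is a minimal sketch of that translation pattern as the tracebacks describe it, not Nova's verbatim source; the exception classes are stand-ins for the real nova.exception and neutronclient.common.exceptions types.

```python
# Minimal sketch of the 401-translation visible in the tracebacks above
# (neutron.py:196 "wrapper" / neutron.py:212 raise). All exception classes
# here are stand-ins, not the real nova.exception / neutronclient types.

class NeutronUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

class Unauthorized(Exception):
    """Stand-in for nova.exception.Unauthorized, surfaced to API callers."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the Nova exception raised when even the admin token fails."""

def proxy_neutron_call(call, is_admin_client):
    """Wrap a neutronclient method so a 401 becomes a Nova exception."""
    def wrapper(*args, **kwargs):
        try:
            return call(*args, **kwargs)
        except NeutronUnauthorized:
            if not is_admin_client:
                # A user token may simply have expired: surface a 401 so
                # the caller can re-authenticate and retry.
                raise Unauthorized()
            # The admin client regenerates tokens from the [neutron]
            # credentials in nova.conf; if even that path yields a 401,
            # the configured credentials themselves are the remaining
            # suspect.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper
```

That split explains why the operator-facing message above points at nova.conf: by the time the admin client sees a 401, token refresh has already been attempted, so the error is attributed to the configured credential set rather than to an expired user token.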
[ 1777.956891] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "c9f74904-0558-42e6-a454-c7103b2873b1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1777.974203] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1546db64-7044-4418-be18-0d59093caa4d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.985521] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb87b10-6d0f-406f-aa19-f23a1844827a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.018777] env[61868]: INFO nova.compute.manager [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] [instance: c9f74904-0558-42e6-a454-c7103b2873b1] Successfully reverted task state from None on failure for instance. [ 1778.021666] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ade8e7-ed8c-4f37-9455-e54e6c2cd1bd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server [None req-cd44f714-a8c1-47cb-b5f0-45dcc7341a76 tempest-TenantUsagesTestJSON-1263202905 tempest-TenantUsagesTestJSON-1263202905-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
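The RPC-layer traceback that follows repeats the same root cause, but it also shows the cleanup idiom appearing at every excutils.py frame: oslo_utils.excutils.save_and_reraise_exception(), which lets each decorator layer run its error handling (reverting task state, setting vm_state to ERROR) and then re-raise the original exception from __exit__ via force_reraise(). A minimal sketch of the idiom, with hypothetical cleanup callables standing in for the real steps:

```python
# Minimal sketch of the save_and_reraise_exception idiom seen in the
# traceback below. deallocate() and set_error_state() are hypothetical
# stand-ins for the real cleanup steps, not Nova functions.
from oslo_utils import excutils

def shutdown_with_cleanup(deallocate, set_error_state):
    try:
        deallocate()
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; when the with-block exits, __exit__ calls
            # force_reraise() and the *original* exception propagates, which
            # is why the same NeutronAdminCredentialConfigurationInvalid
            # resurfaces unchanged at the oslo_messaging.rpc.server layer.
            set_error_state()
```

Compared with a bare re-raise, the context manager also logs the saved exception if the cleanup block itself raises, so the first failure is not silently masked by a secondary one.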
[ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-0acf17fb-a797-4a45-905d-6b57b1ce9bf1'] [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1778.025209] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1778.026744] env[61868]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1778.026744] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1778.028099] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1778.028099] env[61868]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1778.028099] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1778.028099] env[61868]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1778.028099] env[61868]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1778.028099] env[61868]: ERROR oslo_messaging.rpc.server [ 1778.031492] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc48c0b4-0e18-4be9-b8be-f6d9bd718aab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.046047] env[61868]: DEBUG nova.compute.provider_tree [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1778.054420] env[61868]: DEBUG nova.scheduler.client.report [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1778.069999] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1778.070540] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1778.108224] env[61868]: DEBUG nova.compute.utils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1778.109543] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1778.109712] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1778.120206] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1778.159958] env[61868]: DEBUG nova.policy [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74f15b527146bb9bc726e54d220a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d5fac165e449d49cd6e9d9c7e9d116', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1778.193028] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1778.215828] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1778.216096] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1778.216256] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1778.216436] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1778.216581] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1778.216723] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 
tempest-ImagesTestJSON-1042540315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1778.216932] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1778.217089] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1778.217254] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1778.217413] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1778.217579] env[61868]: DEBUG nova.virt.hardware [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1778.218455] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a601eac-61f1-45c0-abca-6f4a37c7b769 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.227011] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa80a468-6b10-4ab0-9c83-f26e38db37e6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.437678] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Successfully created port: 160891cf-8fe2-484f-9a30-b133226db788 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1778.947307] env[61868]: DEBUG nova.compute.manager [req-d850adea-d04c-4041-ad88-6d00c8bb8d1c req-64e22bc5-c928-4bd3-b503-e49caece0464 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Received event network-vif-plugged-160891cf-8fe2-484f-9a30-b133226db788 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1778.947307] env[61868]: DEBUG oslo_concurrency.lockutils [req-d850adea-d04c-4041-ad88-6d00c8bb8d1c req-64e22bc5-c928-4bd3-b503-e49caece0464 service nova] Acquiring lock "e453b684-a54b-46b3-b9ea-4ab9352965f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1778.947307] env[61868]: DEBUG oslo_concurrency.lockutils [req-d850adea-d04c-4041-ad88-6d00c8bb8d1c req-64e22bc5-c928-4bd3-b503-e49caece0464 service nova] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1778.947307] env[61868]: DEBUG oslo_concurrency.lockutils [req-d850adea-d04c-4041-ad88-6d00c8bb8d1c req-64e22bc5-c928-4bd3-b503-e49caece0464 service nova] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1778.947307] env[61868]: DEBUG nova.compute.manager [req-d850adea-d04c-4041-ad88-6d00c8bb8d1c req-64e22bc5-c928-4bd3-b503-e49caece0464 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] No waiting events found dispatching network-vif-plugged-160891cf-8fe2-484f-9a30-b133226db788 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1778.947307] env[61868]: WARNING nova.compute.manager [req-d850adea-d04c-4041-ad88-6d00c8bb8d1c req-64e22bc5-c928-4bd3-b503-e49caece0464 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Received unexpected event network-vif-plugged-160891cf-8fe2-484f-9a30-b133226db788 for instance with vm_state building and task_state spawning. [ 1779.028921] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Successfully updated port: 160891cf-8fe2-484f-9a30-b133226db788 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1779.040381] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "refresh_cache-e453b684-a54b-46b3-b9ea-4ab9352965f7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1779.040520] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "refresh_cache-e453b684-a54b-46b3-b9ea-4ab9352965f7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1779.040673] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1779.084447] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1779.240807] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Updating instance_info_cache with network_info: [{"id": "160891cf-8fe2-484f-9a30-b133226db788", "address": "fa:16:3e:5d:d0:9c", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap160891cf-8f", "ovs_interfaceid": "160891cf-8fe2-484f-9a30-b133226db788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.258854] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "refresh_cache-e453b684-a54b-46b3-b9ea-4ab9352965f7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1779.259247] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Instance network_info: |[{"id": "160891cf-8fe2-484f-9a30-b133226db788", "address": "fa:16:3e:5d:d0:9c", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap160891cf-8f", "ovs_interfaceid": "160891cf-8fe2-484f-9a30-b133226db788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1779.260167] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 
tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:d0:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '160891cf-8fe2-484f-9a30-b133226db788', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1779.268163] env[61868]: DEBUG oslo.service.loopingcall [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1779.268796] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1779.270088] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd94962f-4166-4589-8938-c8c1a606aae2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.291729] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1779.291729] env[61868]: value = "task-41164" [ 1779.291729] env[61868]: _type = "Task" [ 1779.291729] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.300420] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41164, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.802997] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41164, 'name': CreateVM_Task, 'duration_secs': 0.313045} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.803149] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1779.803758] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1779.803994] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1779.806787] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8682c0f3-0ff4-41ac-86e1-93a14608b0bc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.840145] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Reconfiguring VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1779.840550] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e638438e-319f-4da3-a7e3-218f9e800b74 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.857846] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1779.857846] env[61868]: value = "task-41165" [ 1779.857846] env[61868]: _type = "Task" [ 1779.857846] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.866255] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41165, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.368305] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41165, 'name': ReconfigVM_Task, 'duration_secs': 0.112826} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.368675] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Reconfigured VM instance to enable vnc on port - 5904 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1780.368822] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.565s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1780.369082] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1780.369234] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1780.369617] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1780.369922] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d97542d-64ea-4e9f-94b4-77036e3f7c48 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.375841] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1780.375841] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f736e4-97ac-66cc-5ee8-794d9c0510a7" [ 1780.375841] env[61868]: _type = "Task" [ 1780.375841] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.384564] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f736e4-97ac-66cc-5ee8-794d9c0510a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.886907] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1780.887302] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1780.887594] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1780.976741] env[61868]: DEBUG nova.compute.manager [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Received event network-changed-160891cf-8fe2-484f-9a30-b133226db788 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1780.977073] env[61868]: DEBUG nova.compute.manager [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Refreshing instance network info cache due to event network-changed-160891cf-8fe2-484f-9a30-b133226db788. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1780.977427] env[61868]: DEBUG oslo_concurrency.lockutils [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] Acquiring lock "refresh_cache-e453b684-a54b-46b3-b9ea-4ab9352965f7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1780.977690] env[61868]: DEBUG oslo_concurrency.lockutils [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] Acquired lock "refresh_cache-e453b684-a54b-46b3-b9ea-4ab9352965f7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1780.977976] env[61868]: DEBUG nova.network.neutron [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Refreshing network info cache for port 160891cf-8fe2-484f-9a30-b133226db788 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1781.127213] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1781.224405] env[61868]: DEBUG nova.network.neutron [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Updated VIF entry in instance network info cache for port 160891cf-8fe2-484f-9a30-b133226db788. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1781.224774] env[61868]: DEBUG nova.network.neutron [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Updating instance_info_cache with network_info: [{"id": "160891cf-8fe2-484f-9a30-b133226db788", "address": "fa:16:3e:5d:d0:9c", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap160891cf-8f", "ovs_interfaceid": "160891cf-8fe2-484f-9a30-b133226db788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.234981] env[61868]: DEBUG oslo_concurrency.lockutils [req-f19ff67e-3141-479c-bf4b-1d61534ecfc7 req-1e196503-2ee8-42c6-9256-374dbdf8ed69 service nova] Releasing lock "refresh_cache-e453b684-a54b-46b3-b9ea-4ab9352965f7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1787.619157] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "36be944d-04bc-45cd-8019-173437f8ffa5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1791.396549] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.352668] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.353047] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1797.353047] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1797.373419] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 
efb3f108-d3b3-4ebf-a51f-84dc8274f857] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374313] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374612] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374612] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1797.374612] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1801.368186] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1802.351046] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1802.351330] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1802.361453] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1802.361699] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1802.361872] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1802.362029] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1802.363148] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31bb6a6-618d-421e-a490-19c37a71c0b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.373007] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225bdc40-27fc-444e-9626-3dcd1b1a936a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.387852] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6face08b-125a-4549-a492-2338dab4246d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.395025] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47444023-e8ed-4fe7-9572-e857dd4eb44d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.426084] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181933MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1802.426264] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1802.426469] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1802.491310] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.491482] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.491611] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.491734] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.491854] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.491973] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.492107] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.492229] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.492345] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.492509] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1802.503599] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1802.514341] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1802.514580] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1802.514731] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1802.669127] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6717f68d-b6de-4c4a-8207-abb0d3f54879 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.677499] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a7e2db-deab-4352-950e-2a566f122ea0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.707363] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2bc948-2c1c-4f97-a76b-387516894cfc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.715531] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecd5dd2-0363-4a5f-9b67-a1fa7297b850 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.731974] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.740308] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1802.758544] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1802.758741] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.332s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1803.759183] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.351845] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.352267] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.352465] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.352465] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1812.459181] env[61868]: DEBUG oslo_concurrency.lockutils [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "e453b684-a54b-46b3-b9ea-4ab9352965f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1825.135483] env[61868]: WARNING oslo_vmware.rw_handles [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1825.135483] env[61868]: ERROR oslo_vmware.rw_handles [ 1825.136262] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] 
Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1825.137544] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1825.137794] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Copying Virtual Disk [datastore2] vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/e3f96d0a-84ce-4dc8-b847-950aa72b7007/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1825.138068] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78a0a53e-361c-4c05-93ac-de5f78e4c4dc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.146607] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Waiting for the task: (returnval){ [ 1825.146607] env[61868]: value = "task-41166" [ 1825.146607] env[61868]: _type = "Task" [ 1825.146607] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.155034] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Task: {'id': task-41166, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.657432] env[61868]: DEBUG oslo_vmware.exceptions [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1825.657719] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1825.658340] env[61868]: ERROR nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1825.658340] env[61868]: Faults: ['InvalidArgument'] [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Traceback (most recent call last): [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] yield resources [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] self.driver.spawn(context, instance, image_meta, [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] self._fetch_image_if_missing(context, vi) [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] image_cache(vi, tmp_image_ds_loc) [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] vm_util.copy_virtual_disk( [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] session._wait_for_task(vmdk_copy_task) [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] return self.wait_for_task(task_ref) [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] return evt.wait() [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] result = hub.switch() [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] return self.greenlet.switch() [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] self.f(*self.args, **self.kw) [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] raise exceptions.translate_fault(task_info.error) [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Faults: ['InvalidArgument'] [ 1825.658340] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] [ 1825.659356] env[61868]: INFO nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Terminating instance [ 1825.660259] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1825.660474] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1825.660715] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48da9237-9f62-4f0a-bd4e-13399d807ee4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1825.663141] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1825.663322] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1825.664318] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e03d0d0-1ab9-4213-bf60-b84668bf3ecd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.671369] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1825.671586] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d967410e-17be-4250-a750-683cd91a758f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.673826] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1825.673987] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1825.674948] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4166bc47-836a-4a5f-bb15-88d6bfc9dc53 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.679661] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1825.679661] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52db71f7-f5ac-5a90-135d-7e6628a42938" [ 1825.679661] env[61868]: _type = "Task" [ 1825.679661] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.687048] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52db71f7-f5ac-5a90-135d-7e6628a42938, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.737905] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1825.738124] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1825.738309] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Deleting the datastore file [datastore2] efb3f108-d3b3-4ebf-a51f-84dc8274f857 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1825.738596] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb6710e7-2ddb-4944-a31e-d64cdd9dcd98 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.745365] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Waiting for the task: (returnval){ [ 1825.745365] env[61868]: value = "task-41168" [ 1825.745365] env[61868]: _type = "Task" [ 1825.745365] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.753502] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Task: {'id': task-41168, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.190615] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1826.190970] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.191073] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8519a43a-bd8c-4e99-b24b-883a44d69563 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.202631] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.202846] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Fetch image to [datastore2] vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1826.203022] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1826.203753] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57321a5-417d-48a4-a9d5-6f5f2241ded1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.210351] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1236d534-0feb-4b56-807c-107da974934e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.220292] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11138578-aaf8-40e9-9a42-003a83fe739f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.253455] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117d0dd5-b68f-44e6-8f6c-88184eb3d0c3 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.260996] env[61868]: DEBUG oslo_vmware.api [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Task: {'id': task-41168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074854} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.262565] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1826.262764] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1826.262939] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1826.263153] env[61868]: INFO nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1826.265828] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-19546331-61e5-4495-81bb-3711080a1a38 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.267868] env[61868]: DEBUG nova.compute.claims [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1826.268077] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1826.268299] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1826.291836] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1826.349412] env[61868]: DEBUG oslo_vmware.rw_handles [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1826.407920] env[61868]: DEBUG oslo_vmware.rw_handles [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1826.408121] env[61868]: DEBUG oslo_vmware.rw_handles [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1826.507438] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebecd460-0c96-452c-b5bf-c17d02a6387c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.515017] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ee045e-e5be-4e1d-ae5a-33d2a457bbf0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.545025] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cadee9-6327-47e3-9c87-a5bbaa79c818 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.551843] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0861edc-6e34-4d88-aecd-ba9887bde0f1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.564728] env[61868]: DEBUG nova.compute.provider_tree [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1826.573123] env[61868]: DEBUG nova.scheduler.client.report [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1826.591819] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.323s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1826.592417] env[61868]: ERROR nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1826.592417] env[61868]: Faults: ['InvalidArgument'] [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Traceback (most recent call last): [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: 
efb3f108-d3b3-4ebf-a51f-84dc8274f857] self.driver.spawn(context, instance, image_meta, [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] self._fetch_image_if_missing(context, vi) [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] image_cache(vi, tmp_image_ds_loc) [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] vm_util.copy_virtual_disk( [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] session._wait_for_task(vmdk_copy_task) [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] return self.wait_for_task(task_ref) [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] return evt.wait() [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] result = hub.switch() [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] return self.greenlet.switch() [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] self.f(*self.args, **self.kw) [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] raise exceptions.translate_fault(task_info.error) [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Faults: ['InvalidArgument'] [ 1826.592417] env[61868]: ERROR nova.compute.manager [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] [ 1826.593278] env[61868]: DEBUG nova.compute.utils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1826.594519] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Build of instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 was re-scheduled: A specified parameter was not correct: fileType [ 1826.594519] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1826.594887] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1826.595060] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1826.595232] env[61868]: DEBUG nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1826.595397] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1826.935388] env[61868]: DEBUG nova.network.neutron [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.952384] env[61868]: INFO nova.compute.manager [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Took 0.36 seconds to deallocate network for instance. [ 1827.053642] env[61868]: INFO nova.scheduler.client.report [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Deleted allocations for instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 [ 1827.074391] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e2dacbe9-402b-42c1-803f-4545a5563227 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 608.364s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1827.075578] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 412.133s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1827.075796] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Acquiring lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1827.075997] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
1827.076192] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1827.078186] env[61868]: INFO nova.compute.manager [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Terminating instance [ 1827.079914] env[61868]: DEBUG nova.compute.manager [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1827.080125] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1827.080599] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec3a0117-25d6-4613-970f-17cd6949014a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.089969] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b12745-1d74-471c-97bf-6071d6208921 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.103030] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1827.123494] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance efb3f108-d3b3-4ebf-a51f-84dc8274f857 could not be found. [ 1827.123718] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1827.123891] env[61868]: INFO nova.compute.manager [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1827.124167] env[61868]: DEBUG oslo.service.loopingcall [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1827.124392] env[61868]: DEBUG nova.compute.manager [-] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1827.124484] env[61868]: DEBUG nova.network.neutron [-] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1827.155513] env[61868]: DEBUG nova.network.neutron [-] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.160493] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1827.160722] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1827.162840] env[61868]: INFO nova.compute.claims [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1827.166148] env[61868]: INFO nova.compute.manager [-] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] Took 0.04 seconds to deallocate network for instance. [ 1827.277636] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d6ae048e-84eb-4407-9624-21d5f93e37c4 tempest-ServerTagsTestJSON-3206530 tempest-ServerTagsTestJSON-3206530-project-member] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.202s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1827.279242] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 67.625s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1827.279613] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: efb3f108-d3b3-4ebf-a51f-84dc8274f857] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1827.279933] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "efb3f108-d3b3-4ebf-a51f-84dc8274f857" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1827.377995] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db61d450-0e75-4233-ade5-d16027ae744c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.385841] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a81837c-3106-43fb-9428-3500a51c4189 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.416073] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53a2289-6a9b-48ea-9cdb-561b1fda2ecf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.423753] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39104265-a1dc-46e0-bf98-1f7640d7e880 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.437065] env[61868]: DEBUG nova.compute.provider_tree [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1827.445394] env[61868]: DEBUG nova.scheduler.client.report [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1827.462540] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1827.463060] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1827.497458] env[61868]: DEBUG nova.compute.utils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1827.498951] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1827.499207] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1827.512422] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1827.546685] env[61868]: DEBUG nova.policy [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42563ff3e832401b9c7a69c9a3feebaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a33cb95d89ad4e1c8aacebb2a9e16009', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1827.590066] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1827.613949] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1827.614198] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1827.614356] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1827.614538] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1827.614685] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1827.614832] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1827.615037] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1827.615198] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1827.615362] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 
tempest-ServersTestJSON-1722207346-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1827.615523] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1827.615694] env[61868]: DEBUG nova.virt.hardware [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1827.616588] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bd5db9-05d4-4649-b26f-8cb7aa98beba {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.625405] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16729657-83a3-4970-9cea-1a26891ff088 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.814615] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Successfully created port: aaf2b96c-bdf2-45b6-a242-1305fe99123a {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1828.330784] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Successfully updated port: aaf2b96c-bdf2-45b6-a242-1305fe99123a {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1828.343971] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "refresh_cache-ad095fd9-abd0-4c75-8d7c-10dcebc2caee" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1828.343971] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "refresh_cache-ad095fd9-abd0-4c75-8d7c-10dcebc2caee" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1828.343971] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1828.383687] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1828.739446] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Updating instance_info_cache with network_info: [{"id": "aaf2b96c-bdf2-45b6-a242-1305fe99123a", "address": "fa:16:3e:2b:15:ae", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaf2b96c-bd", "ovs_interfaceid": "aaf2b96c-bdf2-45b6-a242-1305fe99123a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.755254] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "refresh_cache-ad095fd9-abd0-4c75-8d7c-10dcebc2caee" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1828.756290] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Instance network_info: |[{"id": "aaf2b96c-bdf2-45b6-a242-1305fe99123a", "address": "fa:16:3e:2b:15:ae", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaf2b96c-bd", "ovs_interfaceid": "aaf2b96c-bdf2-45b6-a242-1305fe99123a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1828.757070] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf 
tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:15:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba6157eb-73cb-428a-9f46-99081165d7eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aaf2b96c-bdf2-45b6-a242-1305fe99123a', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1828.764725] env[61868]: DEBUG oslo.service.loopingcall [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1828.765244] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1828.765489] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed44d31e-a801-4d1f-b6b3-de1ab4982818 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.785856] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1828.785856] env[61868]: value = "task-41169" [ 1828.785856] env[61868]: _type = "Task" [ 1828.785856] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.795355] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41169, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.041662] env[61868]: DEBUG nova.compute.manager [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Received event network-vif-plugged-aaf2b96c-bdf2-45b6-a242-1305fe99123a {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1829.041998] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] Acquiring lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1829.042389] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1829.042690] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1829.042971] env[61868]: DEBUG nova.compute.manager [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] No waiting events found dispatching network-vif-plugged-aaf2b96c-bdf2-45b6-a242-1305fe99123a {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1829.043249] env[61868]: WARNING nova.compute.manager [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Received unexpected event network-vif-plugged-aaf2b96c-bdf2-45b6-a242-1305fe99123a for instance with vm_state building and task_state spawning. [ 1829.043533] env[61868]: DEBUG nova.compute.manager [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Received event network-changed-aaf2b96c-bdf2-45b6-a242-1305fe99123a {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1829.043808] env[61868]: DEBUG nova.compute.manager [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Refreshing instance network info cache due to event network-changed-aaf2b96c-bdf2-45b6-a242-1305fe99123a. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1829.044113] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] Acquiring lock "refresh_cache-ad095fd9-abd0-4c75-8d7c-10dcebc2caee" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1829.044379] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] Acquired lock "refresh_cache-ad095fd9-abd0-4c75-8d7c-10dcebc2caee" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1829.044648] env[61868]: DEBUG nova.network.neutron [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Refreshing network info cache for port aaf2b96c-bdf2-45b6-a242-1305fe99123a {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1829.295948] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41169, 'name': CreateVM_Task, 'duration_secs': 0.30576} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.296161] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1829.296769] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1829.296997] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1829.299909] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20faf857-cb41-4bf2-86fe-748ac4800bce {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.311113] env[61868]: DEBUG nova.network.neutron [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Updated VIF entry in instance network info cache for port aaf2b96c-bdf2-45b6-a242-1305fe99123a. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1829.311402] env[61868]: DEBUG nova.network.neutron [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Updating instance_info_cache with network_info: [{"id": "aaf2b96c-bdf2-45b6-a242-1305fe99123a", "address": "fa:16:3e:2b:15:ae", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaf2b96c-bd", "ovs_interfaceid": "aaf2b96c-bdf2-45b6-a242-1305fe99123a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.323930] env[61868]: DEBUG oslo_concurrency.lockutils [req-a6788ad7-283b-4386-97d4-63df0a071041 req-d968f21f-3f36-4408-a492-ead9849d7e38 service nova] Releasing lock "refresh_cache-ad095fd9-abd0-4c75-8d7c-10dcebc2caee" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1829.341887] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Reconfiguring VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1829.342548] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da014347-b71d-427f-b517-c9f6ecda2863 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.359298] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1829.359298] env[61868]: value = "task-41170" [ 1829.359298] env[61868]: _type = "Task" [ 1829.359298] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.368050] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41170, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.870433] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41170, 'name': ReconfigVM_Task, 'duration_secs': 0.104608} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.870801] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Reconfigured VM instance to enable vnc on port - 5905 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1829.871042] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.574s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1829.871319] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1829.871471] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1829.871810] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1829.872104] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-656f27e7-3864-497b-bf3c-fec6ddeb6283 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.877042] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 1829.877042] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]529f6f95-8402-68ef-1a0f-053208000e3e" [ 1829.877042] env[61868]: _type = "Task" [ 1829.877042] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.885228] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]529f6f95-8402-68ef-1a0f-053208000e3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.388522] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1830.388878] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1830.389032] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1842.209466] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1842.209823] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1851.352686] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.352414] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.352900] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1858.352900] env[61868]: DEBUG 
nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1858.374828] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375066] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375130] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375259] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375382] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375504] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375624] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375744] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375861] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.375977] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1858.376129] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1862.370325] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.347167] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.370461] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.370846] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.381516] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1863.381748] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1863.382057] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1863.382231] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1863.383347] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb182fa-2e29-45b6-b64e-ffbe6bf80e3f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.392317] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fd5028-83c0-416c-aa24-a729e9da6aa1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.409495] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d70ca83-ea8d-43cf-b0b1-e2b9d90eafda {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.417015] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12061cff-fa2a-4673-9176-437acdaaa3db {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.446186] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181942MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1863.446348] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1863.446473] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1863.512013] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.512184] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.512437] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.512575] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.512694] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.512811] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.512928] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.513043] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.513157] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.513270] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1863.524669] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1863.535858] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1863.536108] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1863.536258] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1863.696270] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7299cc09-1b79-4b12-8fed-b25729069bfb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.704471] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790089ce-628b-45dd-afc3-e1dffef156dd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.735286] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d96ee1-32d7-40c0-ad39-239b9232ae90 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.743172] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da8bf4d-d51c-4460-a0d3-608e825e3b37 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.756452] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1863.765523] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1863.783452] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1863.783674] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.337s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1864.765014] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1867.351710] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1868.351093] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1870.351860] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1870.352264] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1871.798585] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1874.764441] env[61868]: WARNING oslo_vmware.rw_handles [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1874.764441] env[61868]: ERROR oslo_vmware.rw_handles [ 1874.765062] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67]
Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1874.766654] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1874.766899] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Copying Virtual Disk [datastore2] vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/91e84b47-a098-47fd-9168-17bccb7e6622/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1874.767182] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e09594ea-92e6-4b37-9ee8-90c9ecec9797 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.777550] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1874.777550] env[61868]: value = "task-41171" [ 1874.777550] env[61868]: _type = "Task" [ 1874.777550] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.786261] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41171, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.290008] env[61868]: DEBUG oslo_vmware.exceptions [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1875.290414] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1875.291206] env[61868]: ERROR nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1875.291206] env[61868]: Faults: ['InvalidArgument'] [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Traceback (most recent call last): [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] yield resources [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] self.driver.spawn(context, instance, image_meta, [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] self._fetch_image_if_missing(context, vi) [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] image_cache(vi, tmp_image_ds_loc) [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] vm_util.copy_virtual_disk( [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] session._wait_for_task(vmdk_copy_task) [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] return self.wait_for_task(task_ref) [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] return evt.wait() [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] result = hub.switch() [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] return self.greenlet.switch() [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] self.f(*self.args, **self.kw) [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] raise exceptions.translate_fault(task_info.error) [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Faults: ['InvalidArgument'] [ 1875.291206] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] [ 1875.292035] env[61868]: INFO nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Terminating instance [ 1875.293310] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1875.293604] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1875.293921] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-274ba56b-5dbb-4202-8c80-ca2ca0a5b07b {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.296307] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1875.296501] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1875.297218] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cea7b4-c855-4b8e-809e-35c3ca852ec6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.304853] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1875.305211] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f63fb7b-470d-49cf-95a3-c11e347ea41c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.307603] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1875.307974] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1875.308842] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-702fac7c-df90-4389-83d2-34e6dbd34e99 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.314306] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1875.314306] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5293c7c3-9772-d14e-7c09-1458ac8ee77f" [ 1875.314306] env[61868]: _type = "Task" [ 1875.314306] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.322798] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5293c7c3-9772-d14e-7c09-1458ac8ee77f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.380782] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1875.381251] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1875.381523] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleting the datastore file [datastore2] ebd4070e-7944-4d2f-8668-01d0ceca0c67 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1875.381900] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-deb7e0f2-a7bb-4b4d-bf90-e06653cd25e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.388377] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 1875.388377] env[61868]: value = "task-41173" [ 1875.388377] env[61868]: _type = "Task" [ 1875.388377] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.396274] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41173, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.824524] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1875.824898] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating directory with path [datastore2] vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1875.825136] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-874c3017-8b42-434b-8cfe-2e9c391fea13 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.839966] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Created directory with path [datastore2] vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1875.840245] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Fetch image to [datastore2] vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1875.840501] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1875.841345] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a6ea95-43c0-49cd-815f-21d23acfc169 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.848338] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4816eb52-c4b0-4466-85c3-2ff472c113eb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.858429] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16614eed-1d65-44a6-a26d-794b1bfe869d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.888502] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c26ed7e8-3b48-4c3c-b638-34216f415f9e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.900557] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-94be66e2-0709-40ee-9766-6f2e41e44858 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.902532] env[61868]: DEBUG oslo_vmware.api [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41173, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077448} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.903110] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1875.903343] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1875.903558] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1875.903764] env[61868]: INFO nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1875.905854] env[61868]: DEBUG nova.compute.claims [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1875.906081] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1875.906352] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1875.924962] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1876.073102] env[61868]: DEBUG oslo_vmware.rw_handles [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1876.131083] env[61868]: DEBUG oslo_vmware.rw_handles [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1876.131306] env[61868]: DEBUG oslo_vmware.rw_handles [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1876.154042] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7f0a42-bd31-436a-8c7d-289799ad2f8d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.162241] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d7161f-ee53-4ab4-a349-5b3be5f5b338 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.192688] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6558bca-abb6-4346-9703-ab9b3cf33338 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.199775] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d90b80-7aa3-47ce-b079-46d70fd799ce {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.212904] env[61868]: DEBUG nova.compute.provider_tree [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1876.221272] env[61868]: DEBUG nova.scheduler.client.report [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1876.240141] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.333s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1876.240217] env[61868]: ERROR nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1876.240217] env[61868]: Faults: ['InvalidArgument'] [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Traceback (most recent call last): [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: 
ebd4070e-7944-4d2f-8668-01d0ceca0c67] self.driver.spawn(context, instance, image_meta, [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] self._fetch_image_if_missing(context, vi) [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] image_cache(vi, tmp_image_ds_loc) [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] vm_util.copy_virtual_disk( [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] session._wait_for_task(vmdk_copy_task) [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] return self.wait_for_task(task_ref) [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] return evt.wait() [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] result = hub.switch() [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] return self.greenlet.switch() [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] self.f(*self.args, **self.kw) [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] raise exceptions.translate_fault(task_info.error) [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Faults: ['InvalidArgument'] [ 1876.240217] env[61868]: ERROR nova.compute.manager [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] [ 1876.241062] env[61868]: DEBUG nova.compute.utils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1876.242876] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Build of instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 was re-scheduled: A specified parameter was not correct: fileType [ 1876.242876] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1876.243258] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1876.243436] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1876.243645] env[61868]: DEBUG nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1876.243814] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1876.514314] env[61868]: DEBUG nova.network.neutron [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.530496] env[61868]: INFO nova.compute.manager [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Took 0.29 seconds to deallocate network for instance. [ 1876.635717] env[61868]: INFO nova.scheduler.client.report [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted allocations for instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 [ 1876.656583] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70200a54-aa7c-4c22-8bc4-77b36bf87bbd tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 652.361s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1876.657792] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 456.950s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1876.658038] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1876.658284] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1876.658470] env[61868]: 
DEBUG oslo_concurrency.lockutils [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1876.660653] env[61868]: INFO nova.compute.manager [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Terminating instance [ 1876.663244] env[61868]: DEBUG nova.compute.manager [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1876.663471] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1876.663745] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69ebdca5-450d-4c30-9c31-cdaf37f5ca83 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.673957] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44edf797-ad9f-49b2-878e-2130dd858a72 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.685315] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1876.711504] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ebd4070e-7944-4d2f-8668-01d0ceca0c67 could not be found. [ 1876.711504] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1876.711504] env[61868]: INFO nova.compute.manager [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1876.711504] env[61868]: DEBUG oslo.service.loopingcall [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1876.711504] env[61868]: DEBUG nova.compute.manager [-] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1876.711504] env[61868]: DEBUG nova.network.neutron [-] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1876.738716] env[61868]: DEBUG nova.network.neutron [-] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.742688] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1876.742993] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1876.744629] env[61868]: INFO nova.compute.claims [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1876.748987] env[61868]: INFO nova.compute.manager [-] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] Took 0.04 seconds to deallocate network for instance. [ 1876.845604] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c89e4471-2898-4e07-b830-be917f8ddb3e tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1876.846895] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 117.192s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1876.847682] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ebd4070e-7944-4d2f-8668-01d0ceca0c67] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1876.847874] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "ebd4070e-7944-4d2f-8668-01d0ceca0c67" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1876.942090] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd75266-1c12-43d6-9b63-b7867a673e7a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.950306] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e58ac6d-279b-4d19-bf96-bbc3c1d4f782 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.982508] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cf7f7b-ebb7-4237-a9bc-593656610acf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.991080] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20cc32b-c472-4546-aaa5-dcdac6c588e7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.005116] env[61868]: DEBUG nova.compute.provider_tree [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.036186] env[61868]: DEBUG nova.scheduler.client.report [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1877.053558] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1877.054277] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Start building networks asynchronously for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1877.092571] env[61868]: DEBUG nova.compute.utils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1877.094059] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1877.094349] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1877.105083] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1877.142143] env[61868]: DEBUG nova.policy [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '424b508614194ac2ad15e8cb62f2d041', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f518980782c4dc5ac6efe31af19af16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1877.174926] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1877.197405] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1877.197889] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1877.198092] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1877.198290] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1877.198439] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1877.198585] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1877.198799] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1877.198971] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1877.199121] env[61868]: DEBUG nova.virt.hardware [None 
req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1877.199338] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1877.199450] env[61868]: DEBUG nova.virt.hardware [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1877.200366] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8e6966-a1ac-49b8-a10e-888ff5ed1134 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.208865] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef45650-f43c-4c75-a05d-f5bfbbc3c9bd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.433491] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Successfully created port: f908744f-cc27-4cf9-ae1a-597fb169b177 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1877.970454] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Successfully updated port: f908744f-cc27-4cf9-ae1a-597fb169b177 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1877.980041] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "refresh_cache-fea9d55d-d045-4d4e-b647-044e4729f21c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1877.980212] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "refresh_cache-fea9d55d-d045-4d4e-b647-044e4729f21c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1877.980354] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1878.238889] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1878.390672] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Updating instance_info_cache with network_info: [{"id": "f908744f-cc27-4cf9-ae1a-597fb169b177", "address": "fa:16:3e:dc:24:fe", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf908744f-cc", "ovs_interfaceid": "f908744f-cc27-4cf9-ae1a-597fb169b177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.405912] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "refresh_cache-fea9d55d-d045-4d4e-b647-044e4729f21c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1878.406326] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Instance network_info: |[{"id": "f908744f-cc27-4cf9-ae1a-597fb169b177", "address": "fa:16:3e:dc:24:fe", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf908744f-cc", "ovs_interfaceid": "f908744f-cc27-4cf9-ae1a-597fb169b177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1878.407053] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:24:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f908744f-cc27-4cf9-ae1a-597fb169b177', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1878.414850] env[61868]: DEBUG oslo.service.loopingcall [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.415342] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1878.415576] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-463e37c8-ed2c-4590-9a12-4c0579aced5e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.436916] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1878.436916] env[61868]: value = "task-41174" [ 1878.436916] env[61868]: _type = "Task" [ 1878.436916] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.445977] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41174, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.554295] env[61868]: DEBUG nova.compute.manager [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Received event network-vif-plugged-f908744f-cc27-4cf9-ae1a-597fb169b177 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1878.554619] env[61868]: DEBUG oslo_concurrency.lockutils [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] Acquiring lock "fea9d55d-d045-4d4e-b647-044e4729f21c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1878.554956] env[61868]: DEBUG oslo_concurrency.lockutils [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1878.555342] env[61868]: DEBUG oslo_concurrency.lockutils [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1878.555651] env[61868]: DEBUG nova.compute.manager [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] No waiting events found dispatching network-vif-plugged-f908744f-cc27-4cf9-ae1a-597fb169b177 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1878.555920] env[61868]: WARNING nova.compute.manager [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Received unexpected event network-vif-plugged-f908744f-cc27-4cf9-ae1a-597fb169b177 for instance with vm_state building and task_state spawning. [ 1878.556186] env[61868]: DEBUG nova.compute.manager [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Received event network-changed-f908744f-cc27-4cf9-ae1a-597fb169b177 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1878.556513] env[61868]: DEBUG nova.compute.manager [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Refreshing instance network info cache due to event network-changed-f908744f-cc27-4cf9-ae1a-597fb169b177. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1878.556847] env[61868]: DEBUG oslo_concurrency.lockutils [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] Acquiring lock "refresh_cache-fea9d55d-d045-4d4e-b647-044e4729f21c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1878.557172] env[61868]: DEBUG oslo_concurrency.lockutils [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] Acquired lock "refresh_cache-fea9d55d-d045-4d4e-b647-044e4729f21c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1878.557449] env[61868]: DEBUG nova.network.neutron [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Refreshing network info cache for port f908744f-cc27-4cf9-ae1a-597fb169b177 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1878.804573] env[61868]: DEBUG nova.network.neutron [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Updated VIF entry in instance network info cache for port f908744f-cc27-4cf9-ae1a-597fb169b177. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1878.804935] env[61868]: DEBUG nova.network.neutron [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Updating instance_info_cache with network_info: [{"id": "f908744f-cc27-4cf9-ae1a-597fb169b177", "address": "fa:16:3e:dc:24:fe", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf908744f-cc", "ovs_interfaceid": "f908744f-cc27-4cf9-ae1a-597fb169b177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.816111] env[61868]: DEBUG oslo_concurrency.lockutils [req-f961fc90-ae8e-4673-94b6-b0637754af08 req-1fc4aa41-c5e1-49c5-b5aa-a5668e780895 service nova] Releasing lock "refresh_cache-fea9d55d-d045-4d4e-b647-044e4729f21c" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1878.947716] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41174, 'name': CreateVM_Task, 'duration_secs': 0.320409} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.947894] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1878.955762] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1878.956035] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1878.958963] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0801a1-8b32-4f98-8bd3-7809faf5845b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.992561] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Reconfiguring VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1878.992907] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae0c9b64-5682-4620-b4b4-96ae409aef05 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.008912] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1879.008912] env[61868]: value = "task-41175" [ 1879.008912] env[61868]: _type = "Task" [ 1879.008912] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.016692] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41175, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.519044] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41175, 'name': ReconfigVM_Task, 'duration_secs': 0.107692} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.519339] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Reconfigured VM instance to enable vnc on port - 5906 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1879.519557] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.564s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1879.519807] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1879.519963] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1879.520315] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1879.520587] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6ca1dcd-8c02-400d-88d0-8b29f44dcfe7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.525508] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1879.525508] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52dfa8ea-b2af-6f81-aaa8-b9370b778435" [ 1879.525508] env[61868]: _type = "Task" [ 1879.525508] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.533712] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52dfa8ea-b2af-6f81-aaa8-b9370b778435, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.037256] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1880.037611] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1880.037724] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1881.582284] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "fea9d55d-d045-4d4e-b647-044e4729f21c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1912.351580] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.354348] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1918.354701] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1918.354701] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1918.376526] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.376673] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.376805] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.376906] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.377031] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.377154] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.377275] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.377393] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.377510] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.377628] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1918.377744] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1922.370094] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1924.351570] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1924.361889] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1924.362129] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1924.362326] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1924.362487] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1924.363597] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f005442-6d9a-41dc-8779-bf0ed96e08af {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.372454] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaa3cac-3023-4676-9562-d3e57489bed4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.387695] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6e1c6f-3e39-46ca-bc2b-a1d12edc176b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.394814] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2fa6a6-3a8b-4262-a282-079bd7cdb6b5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.423686] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181939MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1924.423868] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1924.424032] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1924.487325] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.487686] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.487944] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.488193] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.488427] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.488642] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.488856] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.489078] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.489302] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.489512] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1924.501527] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1924.502133] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1924.502443] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1924.649582] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d53147f-ea20-4cc9-a40b-99d14cdf3046 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.657500] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451f7dda-5452-4287-abf8-688d59f7e754 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.688619] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a63282-d574-4e13-88d2-63ff5a96bc76 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.696321] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438f0176-9a4a-46d4-ac88-8a02222db861 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.709623] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1924.718297] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1924.735661] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1924.735862] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.312s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1925.735785] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1925.736195] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1926.103253] env[61868]: WARNING oslo_vmware.rw_handles [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles 
raise RemoteDisconnected("Remote end closed connection without" [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1926.103253] env[61868]: ERROR oslo_vmware.rw_handles [ 1926.103748] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1926.105557] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1926.105819] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Copying Virtual Disk [datastore2] vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/90bb4283-b4b6-4d99-b14d-8ba180db0cf9/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1926.106106] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a98883d-5ad3-4d13-b49e-97b8255377b1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.115799] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1926.115799] env[61868]: value = "task-41176" [ 1926.115799] env[61868]: _type = "Task" [ 1926.115799] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.124842] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41176, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.626083] env[61868]: DEBUG oslo_vmware.exceptions [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1926.626387] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1926.626936] env[61868]: ERROR nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1926.626936] env[61868]: Faults: ['InvalidArgument'] [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Traceback (most recent call last): [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] yield resources [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self.driver.spawn(context, instance, image_meta, [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self._fetch_image_if_missing(context, vi) [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] image_cache(vi, tmp_image_ds_loc) [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] vm_util.copy_virtual_disk( [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] session._wait_for_task(vmdk_copy_task) [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] return self.wait_for_task(task_ref) [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] return evt.wait() [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] result = hub.switch() [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] return self.greenlet.switch() [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self.f(*self.args, **self.kw) [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] raise exceptions.translate_fault(task_info.error) [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Faults: ['InvalidArgument'] [ 1926.626936] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] [ 1926.628566] env[61868]: INFO nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Terminating instance [ 1926.628787] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1926.628993] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1926.629262] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20a758b9-a5c0-4c3b-97a8-b0529321fc8a 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.631512] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1926.631706] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1926.632477] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093a0bb4-157f-4b9b-8fc6-7fff59728396 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.639540] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1926.639764] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-269b07ff-df9e-4b1a-8440-dc6b88a745e3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.642118] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1926.642306] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1926.643234] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d8187f2-4189-47b3-97e2-b4034e5ac633 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.647825] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1926.647825] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5233ecb9-58e0-ac62-114c-afa10f0db627" [ 1926.647825] env[61868]: _type = "Task" [ 1926.647825] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.655394] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5233ecb9-58e0-ac62-114c-afa10f0db627, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.714260] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1926.714517] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1926.714687] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Deleting the datastore file [datastore2] d6f7828e-6617-40ca-9f6c-e3a72c328dc9 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1926.714974] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80e8ae28-79ad-40b7-997a-d4a9914cdea7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.722500] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for the task: (returnval){ [ 1926.722500] env[61868]: value = "task-41178" [ 1926.722500] env[61868]: _type = "Task" [ 1926.722500] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.730748] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41178, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.160390] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1927.160676] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1927.160933] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50a46d6d-2629-4c6d-b905-a64382b8f30c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.173280] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1927.173583] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Fetch image to [datastore2] vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1927.173783] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1927.174594] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb5e922-3c7c-4b3d-a472-08e0df4a7e1d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.181691] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d55a4e9-e3b9-4eeb-9fb5-29e2aa67ef56 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.193026] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eab6b5e-117b-4d3e-90ad-f8396651ddac {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.227430] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3afcc3f8-30f9-4acc-85f7-1c05df298954 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.237084] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ef8de39b-02d1-4c72-80f2-2f76ea3c5ab0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.239046] env[61868]: DEBUG oslo_vmware.api [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Task: {'id': task-41178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080391} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.239299] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1927.239480] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1927.239656] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1927.239838] env[61868]: INFO nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Took 0.61 seconds to destroy the instance on the hypervisor. 
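[editor's note] The records above show the oslo.vmware task pattern end to end: a *_Task call is invoked, "Waiting for the task" is logged, progress is polled ("progress is 0%"), and completion is reported with a duration_secs value. Below is a minimal sketch of that polling loop for readers unfamiliar with it; the names (wait_for_task, TaskFault, get_task_info) are illustrative stand-ins, not the actual oslo.vmware implementation.

    import time

    class TaskFault(Exception):
        """Raised when the polled task finishes in an error state."""

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a task until it completes, mirroring the 'progress is N%'
        and 'duration_secs' bookkeeping visible in the log records above."""
        start = time.monotonic()
        while True:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 40}
            if info['state'] == 'success':
                info['duration_secs'] = time.monotonic() - start
                return info
            if info['state'] == 'error':
                # oslo.vmware translates the server fault before raising
                # (see 'Fault InvalidArgument not matched' above); here we
                # just wrap it in a generic exception.
                raise TaskFault(info.get('error', 'unknown fault'))
            print(f"Task progress is {info.get('progress', 0)}%.")
            time.sleep(interval)

A failed CopyVirtualDisk_Task polled this way is exactly what surfaces as the VimFaultException ("A specified parameter was not correct: fileType") in the tracebacks above.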
[ 1927.242013] env[61868]: DEBUG nova.compute.claims [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1927.242210] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1927.242426] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1927.261919] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1927.310329] env[61868]: DEBUG oslo_vmware.rw_handles [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1927.366017] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1927.370942] env[61868]: DEBUG oslo_vmware.rw_handles [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1927.371155] env[61868]: DEBUG oslo_vmware.rw_handles [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1927.467517] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6824826-ebc0-47f9-9fa4-26ac15c86bce {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.476176] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c711cc01-69ac-490d-82e2-ca9e0d9f5221 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.507665] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b610e72-3c99-4ec6-be07-2801e0002128 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.515524] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215dc005-4428-4d64-b0f9-b2167348e214 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.531148] env[61868]: DEBUG nova.compute.provider_tree [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.540065] env[61868]: DEBUG nova.scheduler.client.report [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1927.556054] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.313s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1927.556624] env[61868]: ERROR nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1927.556624] env[61868]: Faults: ['InvalidArgument'] [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Traceback (most recent call last): [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1927.556624] 
env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self.driver.spawn(context, instance, image_meta, [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self._fetch_image_if_missing(context, vi) [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] image_cache(vi, tmp_image_ds_loc) [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] vm_util.copy_virtual_disk( [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] session._wait_for_task(vmdk_copy_task) [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] return self.wait_for_task(task_ref) [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] return evt.wait() [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] result = hub.switch() [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] return self.greenlet.switch() [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] self.f(*self.args, **self.kw) [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] raise exceptions.translate_fault(task_info.error) [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Faults: ['InvalidArgument'] [ 1927.556624] env[61868]: ERROR nova.compute.manager [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] [ 1927.557484] env[61868]: DEBUG nova.compute.utils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1927.559215] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Build of instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 was re-scheduled: A specified parameter was not correct: fileType [ 1927.559215] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1927.559602] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1927.559784] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1927.559965] env[61868]: DEBUG nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1927.560162] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1927.812380] env[61868]: DEBUG nova.network.neutron [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.830270] env[61868]: INFO nova.compute.manager [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Took 0.27 seconds to deallocate network for instance. [ 1927.929387] env[61868]: INFO nova.scheduler.client.report [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Deleted allocations for instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 [ 1927.949366] env[61868]: DEBUG oslo_concurrency.lockutils [None req-fa2268ed-9286-4836-8782-67c19dd836dd tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 692.963s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1927.950655] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 496.260s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1927.950954] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Acquiring lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1927.951230] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1927.951441] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1927.953861] env[61868]: INFO nova.compute.manager [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Terminating instance [ 1927.956177] env[61868]: DEBUG nova.compute.manager [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1927.956373] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1927.956626] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f513d64-6999-46b3-b3fe-e5055c2098d8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.967616] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c743b907-af27-48d2-9b81-17f037c9340a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.978574] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1928.002351] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6f7828e-6617-40ca-9f6c-e3a72c328dc9 could not be found. 
[ 1928.002665] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1928.003138] env[61868]: INFO nova.compute.manager [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1928.003413] env[61868]: DEBUG oslo.service.loopingcall [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1928.003649] env[61868]: DEBUG nova.compute.manager [-] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1928.003746] env[61868]: DEBUG nova.network.neutron [-] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1928.030399] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1928.030662] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1928.032336] env[61868]: INFO nova.compute.claims [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1928.039035] env[61868]: DEBUG nova.network.neutron [-] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.045815] env[61868]: INFO nova.compute.manager [-] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] Took 0.04 seconds to deallocate network for instance. 
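[editor's note] The inventory records in this section (VCPU, MEMORY_MB, DISK_GB for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc) determine how much the scheduler can place on this node. Placement's usable capacity per resource class is (total - reserved) * allocation_ratio; a worked calculation with the exact values reported above:

    # Inventory as reported for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity = {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

This is why the "Inventory has not changed" records are cheap no-ops: the report client only resends inventory to placement when these numbers differ.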
[ 1928.143456] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0d2da2c9-af9c-46f6-a306-a7f51569a5c4 tempest-AttachVolumeNegativeTest-329748670 tempest-AttachVolumeNegativeTest-329748670-project-member] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1928.144279] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 168.489s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1928.144464] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d6f7828e-6617-40ca-9f6c-e3a72c328dc9] During sync_power_state the instance has a pending task (deleting). Skip. [ 1928.144635] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "d6f7828e-6617-40ca-9f6c-e3a72c328dc9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1928.241864] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f59af1a-eea5-40d4-ba60-e04b7d2dc63b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.249901] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd414108-d91f-4d24-8377-dccbd879b0b0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.285287] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a84247-6a2e-4267-8d4d-35d2aa09c11a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.293245] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addb36b2-c66c-4549-8b97-eb74c74755e7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.307232] env[61868]: DEBUG nova.compute.provider_tree [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1928.318713] env[61868]: DEBUG nova.scheduler.client.report [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1928.332868] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1928.333381] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1928.370290] env[61868]: DEBUG nova.compute.utils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1928.371792] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1928.372075] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1928.388519] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1928.418324] env[61868]: DEBUG nova.policy [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7fb3fdfdab2460495f39c43d1ddfeed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06caa5ecb11a42d9b84b0be1ecd6490d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1928.486410] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1928.508958] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1928.509211] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1928.509365] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1928.509546] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1928.509693] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1928.509836] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1928.510045] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1928.510204] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1928.510372] env[61868]: DEBUG nova.virt.hardware [None 
req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1928.510534] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1928.510705] env[61868]: DEBUG nova.virt.hardware [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1928.511668] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f38103-ced4-4c72-85f5-f5602a795ab1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.520211] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04fdd9b-8bef-4660-81c8-2c4b6d47d1f5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.688047] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Successfully created port: 6bbbd2da-b17b-4f92-8721-41819ad2061b {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1929.199130] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Successfully updated port: 6bbbd2da-b17b-4f92-8721-41819ad2061b {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1929.213957] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "refresh_cache-47821dd7-73ae-40eb-b7f2-7b656737cd1f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1929.213957] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquired lock "refresh_cache-47821dd7-73ae-40eb-b7f2-7b656737cd1f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1929.213957] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1929.254038] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee 
tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1929.417049] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Updating instance_info_cache with network_info: [{"id": "6bbbd2da-b17b-4f92-8721-41819ad2061b", "address": "fa:16:3e:a1:01:93", "network": {"id": "dbe40d05-e2e3-4304-861f-b4dd2b715ea4", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-353915669-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "06caa5ecb11a42d9b84b0be1ecd6490d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bbbd2da-b1", "ovs_interfaceid": "6bbbd2da-b17b-4f92-8721-41819ad2061b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.432850] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Releasing lock "refresh_cache-47821dd7-73ae-40eb-b7f2-7b656737cd1f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1929.433203] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Instance network_info: |[{"id": "6bbbd2da-b17b-4f92-8721-41819ad2061b", "address": "fa:16:3e:a1:01:93", "network": {"id": "dbe40d05-e2e3-4304-861f-b4dd2b715ea4", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-353915669-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "06caa5ecb11a42d9b84b0be1ecd6490d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bbbd2da-b1", "ovs_interfaceid": "6bbbd2da-b17b-4f92-8721-41819ad2061b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1929.433922] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:01:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496faa4d-d874-449b-905e-328ddd60b31b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bbbd2da-b17b-4f92-8721-41819ad2061b', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1929.441400] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Creating folder: Project (06caa5ecb11a42d9b84b0be1ecd6490d). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1929.441924] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aec2abbb-82e4-46f5-ace7-992d9d34ccbd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.453312] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Created folder: Project (06caa5ecb11a42d9b84b0be1ecd6490d) in parent group-v18181. [ 1929.453676] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Creating folder: Instances. Parent ref: group-v18303. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1929.453964] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0baa2fe6-d70a-43ac-85cb-04615a464466 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.463775] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Created folder: Instances in parent group-v18303. [ 1929.464083] env[61868]: DEBUG oslo.service.loopingcall [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1929.464290] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1929.464507] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00583b52-f180-4a5b-b5e9-8f1e38daca78 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.485113] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1929.485113] env[61868]: value = "task-41181" [ 1929.485113] env[61868]: _type = "Task" [ 1929.485113] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.494367] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41181, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.852840] env[61868]: DEBUG nova.compute.manager [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Received event network-vif-plugged-6bbbd2da-b17b-4f92-8721-41819ad2061b {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1929.853065] env[61868]: DEBUG oslo_concurrency.lockutils [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] Acquiring lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1929.853305] env[61868]: DEBUG oslo_concurrency.lockutils [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1929.853457] env[61868]: DEBUG oslo_concurrency.lockutils [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1929.853635] env[61868]: DEBUG nova.compute.manager [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] No waiting events found dispatching network-vif-plugged-6bbbd2da-b17b-4f92-8721-41819ad2061b {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1929.853802] env[61868]: WARNING nova.compute.manager [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Received unexpected event network-vif-plugged-6bbbd2da-b17b-4f92-8721-41819ad2061b for instance with vm_state building and task_state spawning. 
[ 1929.853967] env[61868]: DEBUG nova.compute.manager [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Received event network-changed-6bbbd2da-b17b-4f92-8721-41819ad2061b {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1929.854124] env[61868]: DEBUG nova.compute.manager [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Refreshing instance network info cache due to event network-changed-6bbbd2da-b17b-4f92-8721-41819ad2061b. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1929.854309] env[61868]: DEBUG oslo_concurrency.lockutils [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] Acquiring lock "refresh_cache-47821dd7-73ae-40eb-b7f2-7b656737cd1f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1929.854447] env[61868]: DEBUG oslo_concurrency.lockutils [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] Acquired lock "refresh_cache-47821dd7-73ae-40eb-b7f2-7b656737cd1f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1929.854610] env[61868]: DEBUG nova.network.neutron [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Refreshing network info cache for port 6bbbd2da-b17b-4f92-8721-41819ad2061b {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1929.999422] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41181, 'name': CreateVM_Task, 'duration_secs': 0.302859} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.999592] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1930.000246] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1930.000469] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1930.003619] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88122c7e-afb8-4bce-b345-2b6bfaeeadae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.040664] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Reconfiguring VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 1930.041138] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5c2ba3d-c887-43f6-97c9-ffd32ee5f76c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.058073] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Waiting for the task: (returnval){ [ 1930.058073] env[61868]: value = "task-41182" [ 1930.058073] env[61868]: _type = "Task" [ 1930.058073] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.074806] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Task: {'id': task-41182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.107287] env[61868]: DEBUG nova.network.neutron [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Updated VIF entry in instance network info cache for port 6bbbd2da-b17b-4f92-8721-41819ad2061b. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1930.107920] env[61868]: DEBUG nova.network.neutron [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Updating instance_info_cache with network_info: [{"id": "6bbbd2da-b17b-4f92-8721-41819ad2061b", "address": "fa:16:3e:a1:01:93", "network": {"id": "dbe40d05-e2e3-4304-861f-b4dd2b715ea4", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-353915669-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "06caa5ecb11a42d9b84b0be1ecd6490d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bbbd2da-b1", "ovs_interfaceid": "6bbbd2da-b17b-4f92-8721-41819ad2061b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.118438] env[61868]: DEBUG oslo_concurrency.lockutils [req-0601397e-ff1b-44b5-8a72-2e6061ce5138 req-5d3e887a-e516-4b87-9988-e8a09d2d113e service nova] Releasing lock "refresh_cache-47821dd7-73ae-40eb-b7f2-7b656737cd1f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1930.351473] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1930.351852] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1930.351852] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 1930.568848] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Task: {'id': task-41182, 'name': ReconfigVM_Task, 'duration_secs': 0.11294} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.569290] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Reconfigured VM instance to enable vnc on port - 5907 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 1930.569539] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.569s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1930.569864] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1930.570033] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1930.570399] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1930.570787] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1abc2cb1-c047-4aca-b7eb-ddafc67db869 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.575489] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Waiting for the task: (returnval){ [ 1930.575489] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]522898f5-e4dd-8ece-f181-f0cc3a9eb94c" [ 1930.575489] env[61868]: _type = "Task" [ 1930.575489] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.583498] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]522898f5-e4dd-8ece-f181-f0cc3a9eb94c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.086036] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1931.086301] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1931.086513] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1973.352270] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1975.871502] env[61868]: WARNING oslo_vmware.rw_handles [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1975.871502] env[61868]: ERROR oslo_vmware.rw_handles [ 1975.871502] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to 
vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1975.873240] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1975.873531] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Copying Virtual Disk [datastore2] vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/f265a6b3-ea65-4222-81d7-ba58a68d7aaf/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1975.873824] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6610abf5-c7a2-4526-837f-b76884fcb64a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.882223] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1975.882223] env[61868]: value = "task-41183" [ 1975.882223] env[61868]: _type = "Task" [ 1975.882223] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.891461] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.393398] env[61868]: DEBUG oslo_vmware.exceptions [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1976.393746] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1976.394336] env[61868]: ERROR nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1976.394336] env[61868]: Faults: ['InvalidArgument'] [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Traceback (most recent call last): [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] yield resources [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self.driver.spawn(context, instance, image_meta, [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self._fetch_image_if_missing(context, vi) [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] image_cache(vi, tmp_image_ds_loc) [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] vm_util.copy_virtual_disk( [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] session._wait_for_task(vmdk_copy_task) [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] return self.wait_for_task(task_ref) [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] return evt.wait() [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] result = hub.switch() [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] return self.greenlet.switch() [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self.f(*self.args, **self.kw) [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] raise exceptions.translate_fault(task_info.error) [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Faults: ['InvalidArgument'] [ 1976.394336] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] [ 1976.395389] env[61868]: INFO nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Terminating instance [ 1976.396213] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1976.396426] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1976.396693] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c787a5da-0131-4616-8c35-c7ad9b07e237 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.398960] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1976.399167] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1976.399901] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ae75fc-307b-49f8-93b9-8a70013e8b7b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.407320] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1976.407528] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcfce007-cfa3-4a07-a893-0849e1f8797c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.409873] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1976.410050] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1976.411115] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18bd111f-7f31-4f95-9f2d-ef4897aeb94a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.416491] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Waiting for the task: (returnval){ [ 1976.416491] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]522565ba-f15d-bce5-1744-19194efca328" [ 1976.416491] env[61868]: _type = "Task" [ 1976.416491] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.426257] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]522565ba-f15d-bce5-1744-19194efca328, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.485135] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1976.485426] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1976.485550] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleting the datastore file [datastore2] 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1976.485842] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2091e988-280d-49bf-94ea-71364103593c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.492620] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 1976.492620] env[61868]: value = "task-41185" [ 1976.492620] env[61868]: _type = "Task" [ 1976.492620] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.500926] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41185, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.927397] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1976.927870] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Creating directory with path [datastore2] vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1976.927955] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc5f333c-7721-4725-ab2d-3f201c09b844 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.942441] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Created directory with path [datastore2] vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1976.942580] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Fetch image to [datastore2] vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1976.942767] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1976.943634] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacc77a9-a74a-43ae-896c-813735f94a7f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.953600] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c10e4f1-33bd-4467-b503-44d842ec6bec {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.964886] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10dd3b3c-7147-4136-9537-3b967df44e2b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.001610] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a33cb54f-1bd0-447e-a178-21b81aef0acd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.011906] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fb47a54f-c2c1-4a49-80e7-823ef6531e51 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.013889] env[61868]: DEBUG oslo_vmware.api [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070179} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.014133] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1977.014350] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1977.014534] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1977.014714] env[61868]: INFO nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1977.016917] env[61868]: DEBUG nova.compute.claims [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1977.017137] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1977.017444] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1977.042097] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1977.165684] env[61868]: DEBUG oslo_vmware.rw_handles [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1977.222727] env[61868]: DEBUG oslo_vmware.rw_handles [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1977.222921] env[61868]: DEBUG oslo_vmware.rw_handles [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1977.270091] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dae244b-c1ff-4619-b517-7c2a87a1b568 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.278616] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4331cdb7-ec7a-4138-aa6a-7dfbe4d50107 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.310329] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f0c4d3-d7fc-4dff-9e62-8341b3e2e35e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.318329] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdc86b5-1861-4447-b1a9-1de462ed4e97 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.331883] env[61868]: DEBUG nova.compute.provider_tree [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1977.341045] env[61868]: DEBUG nova.scheduler.client.report [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1977.359724] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.342s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1977.360436] env[61868]: ERROR nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1977.360436] env[61868]: Faults: ['InvalidArgument'] [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Traceback (most recent call last): [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1977.360436] env[61868]: ERROR 
nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self.driver.spawn(context, instance, image_meta, [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self._fetch_image_if_missing(context, vi) [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] image_cache(vi, tmp_image_ds_loc) [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] vm_util.copy_virtual_disk( [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] session._wait_for_task(vmdk_copy_task) [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] return self.wait_for_task(task_ref) [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] return evt.wait() [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] result = hub.switch() [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] return self.greenlet.switch() [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] self.f(*self.args, **self.kw) [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] raise exceptions.translate_fault(task_info.error) [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Faults: ['InvalidArgument'] [ 1977.360436] env[61868]: ERROR nova.compute.manager [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] [ 1977.361874] env[61868]: DEBUG nova.compute.utils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1977.363675] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Build of instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 was re-scheduled: A specified parameter was not correct: fileType [ 1977.363675] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1977.364194] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1977.364457] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1977.364697] env[61868]: DEBUG nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1977.364917] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1977.633613] env[61868]: DEBUG nova.network.neutron [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.647629] env[61868]: INFO nova.compute.manager [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Took 0.28 seconds to deallocate network for instance. [ 1977.756103] env[61868]: INFO nova.scheduler.client.report [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted allocations for instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 [ 1977.778943] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54b434c0-87a4-4acf-9495-f33d6d936275 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 683.382s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1977.779254] env[61868]: DEBUG oslo_concurrency.lockutils [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 487.973s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1977.779474] env[61868]: DEBUG oslo_concurrency.lockutils [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1977.779670] env[61868]: DEBUG oslo_concurrency.lockutils [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1977.779829] env[61868]: DEBUG oslo_concurrency.lockutils [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1977.782198] env[61868]: INFO nova.compute.manager [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Terminating instance [ 1977.784261] env[61868]: DEBUG nova.compute.manager [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1977.784889] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1977.785165] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07f78b60-edd8-48a2-b7d0-80eb2722bb4f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.800038] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db741dcd-571f-4a1d-b6cc-16b82e211642 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.831148] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95ba0df5-846c-4e5d-94e6-fd9c43dcc191 could not be found. [ 1977.831519] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1977.831766] env[61868]: INFO nova.compute.manager [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1977.832090] env[61868]: DEBUG oslo.service.loopingcall [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1977.832400] env[61868]: DEBUG nova.compute.manager [-] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1977.832544] env[61868]: DEBUG nova.network.neutron [-] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1977.860975] env[61868]: DEBUG nova.network.neutron [-] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.869098] env[61868]: INFO nova.compute.manager [-] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] Took 0.04 seconds to deallocate network for instance. [ 1977.960606] env[61868]: DEBUG oslo_concurrency.lockutils [None req-dbdc02e9-97a2-4cb1-903f-46b494cd007f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.181s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1977.961587] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 218.306s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1977.961835] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 95ba0df5-846c-4e5d-94e6-fd9c43dcc191] During sync_power_state the instance has a pending task (deleting). Skip. [ 1977.962056] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "95ba0df5-846c-4e5d-94e6-fd9c43dcc191" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1979.351622] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.351921] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1979.351960] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 1979.377875] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.378061] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.378198] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.378327] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.378455] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.378684] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.378746] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.378839] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.379236] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1979.379413] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 1982.374335] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1983.346837] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1984.309682] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1984.309682] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1984.325153] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1984.385174] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1984.385435] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1984.386877] env[61868]: INFO nova.compute.claims [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1984.562036] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c23e98-f10a-4756-af13-db08c776b85e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.570724] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12622706-0b5b-40e8-b86e-a9ac7dac38b7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.601432] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879f88fd-1ced-4fed-afe3-73c97bf10a0b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.609228] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1bb828-e79c-4a23-8f49-1219af6135ea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.622680] env[61868]: DEBUG nova.compute.provider_tree [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1984.631543] env[61868]: DEBUG nova.scheduler.client.report [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1984.647538] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.262s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1984.648130] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1984.684836] env[61868]: DEBUG nova.compute.utils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1984.686654] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1984.686879] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1984.698321] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1984.733354] env[61868]: DEBUG nova.policy [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a91b3a84ec5c48d896a5bf3d8c568343', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9eabc0f9c1604e90b373219843edfc8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 1984.770774] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1984.796714] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1984.797022] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1984.797198] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1984.797379] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1984.797527] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1984.797672] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1984.797880] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1984.798086] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1984.798263] env[61868]: DEBUG 
nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1984.798427] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1984.798594] env[61868]: DEBUG nova.virt.hardware [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1984.799560] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32b0cd4-10f3-4278-81ac-8f0489e75cf0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.808736] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c5ac67-03c7-42da-9688-e6a93f9f6ecd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.024253] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Successfully created port: 45b96ccf-14b6-463c-8240-f354320958bd {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1985.350743] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.361356] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1985.361630] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1985.361811] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1985.361972] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1985.363135] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6ca3e8-ad50-4c8a-abd9-7753979beeaf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.373810] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cbcf95-77f8-41bb-98a9-cae385e044d7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.390467] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c849ab-f845-4dd2-b85d-5f5e9693204f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.398970] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e360ba-2c55-4db0-837b-57a35583dc3d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.429495] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181892MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1985.429691] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1985.429858] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1985.500770] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance d69cb3f5-b385-432a-b562-87d0b1b0877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501212] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 97840d8b-90ee-432e-988a-30548b61381b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501212] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501315] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501361] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501479] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501649] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501772] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501885] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.501993] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1985.502231] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1985.502372] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1985.549832] env[61868]: DEBUG nova.compute.manager [req-9fe34406-12dc-495b-8097-952dcc6b83d2 req-a5f696a3-0c2e-4575-b262-24bf95d0b405 service nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Received event network-vif-plugged-45b96ccf-14b6-463c-8240-f354320958bd {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1985.550087] env[61868]: DEBUG oslo_concurrency.lockutils [req-9fe34406-12dc-495b-8097-952dcc6b83d2 req-a5f696a3-0c2e-4575-b262-24bf95d0b405 service nova] Acquiring lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1985.550335] env[61868]: DEBUG oslo_concurrency.lockutils [req-9fe34406-12dc-495b-8097-952dcc6b83d2 req-a5f696a3-0c2e-4575-b262-24bf95d0b405 service nova] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1985.550505] env[61868]: DEBUG oslo_concurrency.lockutils [req-9fe34406-12dc-495b-8097-952dcc6b83d2 req-a5f696a3-0c2e-4575-b262-24bf95d0b405 service nova] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1985.550693] env[61868]: DEBUG nova.compute.manager [req-9fe34406-12dc-495b-8097-952dcc6b83d2 req-a5f696a3-0c2e-4575-b262-24bf95d0b405 service nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] No waiting events found dispatching network-vif-plugged-45b96ccf-14b6-463c-8240-f354320958bd {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1985.550883] env[61868]: WARNING nova.compute.manager [req-9fe34406-12dc-495b-8097-952dcc6b83d2 req-a5f696a3-0c2e-4575-b262-24bf95d0b405 service nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Received unexpected event network-vif-plugged-45b96ccf-14b6-463c-8240-f354320958bd for instance with vm_state building and task_state spawning. 
[ 1985.617654] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Successfully updated port: 45b96ccf-14b6-463c-8240-f354320958bd {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1985.629275] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "refresh_cache-4a4e7cbe-dc5d-4643-b115-0142b5c978de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1985.629275] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "refresh_cache-4a4e7cbe-dc5d-4643-b115-0142b5c978de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1985.629275] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1985.653365] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a07ddd-6d93-4361-a3fd-ede9ecd0ac12 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.662249] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa74064-cf20-4af0-a1c9-7da36d30ea46 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.697331] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1985.699735] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271ed2bb-52fe-4c1e-86ff-6aeb327a48d9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.708187] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476292af-d192-4ddf-a2bf-32f86dae27c8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.722295] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1985.730975] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1985.750061] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1985.750276] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.320s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1986.047540] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Updating instance_info_cache with network_info: [{"id": "45b96ccf-14b6-463c-8240-f354320958bd", "address": "fa:16:3e:bf:d9:bc", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45b96ccf-14", "ovs_interfaceid": "45b96ccf-14b6-463c-8240-f354320958bd", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.061041] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "refresh_cache-4a4e7cbe-dc5d-4643-b115-0142b5c978de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1986.061401] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Instance network_info: |[{"id": "45b96ccf-14b6-463c-8240-f354320958bd", "address": "fa:16:3e:bf:d9:bc", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45b96ccf-14", "ovs_interfaceid": "45b96ccf-14b6-463c-8240-f354320958bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1986.061832] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:d9:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45b96ccf-14b6-463c-8240-f354320958bd', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1986.069797] env[61868]: DEBUG oslo.service.loopingcall [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1986.071027] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1986.071027] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d07cda6d-7e7c-488d-b09d-876215c329a2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1986.092375] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1986.092375] env[61868]: value = "task-41186"
[ 1986.092375] env[61868]: _type = "Task"
[ 1986.092375] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1986.101358] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41186, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1986.603498] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41186, 'name': CreateVM_Task, 'duration_secs': 0.302594} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1986.603853] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1986.604207] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1986.604445] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1986.607320] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e3bc9d-1a63-4cba-9b67-731b523f61fa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1986.639150] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Reconfiguring VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}}
[ 1986.639459] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46960ed6-1737-460f-86f8-0d5a339669c4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1986.655970] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){
[ 1986.655970] env[61868]: value = "task-41187"
[ 1986.655970] env[61868]: _type = "Task"
[ 1986.655970] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1986.665534] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1986.753367] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1987.166300] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41187, 'name': ReconfigVM_Task, 'duration_secs': 0.106451} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1987.166601] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Reconfigured VM instance to enable vnc on port - 5909 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}}
[ 1987.166822] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.562s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1987.167079] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 1987.167224] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 1987.167587] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}}
[ 1987.167873] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13e27dbc-c368-4ff5-b5e8-172e700a1602 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1987.173498] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){
[ 1987.173498] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f2236d-4610-204f-8d9a-379d2af51c07"
[ 1987.173498] env[61868]: _type = "Task"
[ 1987.173498] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1987.182094] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f2236d-4610-204f-8d9a-379d2af51c07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1987.351164] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1987.351432] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1987.684674] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 1987.684938] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1987.685146] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 1987.688253] env[61868]: DEBUG nova.compute.manager [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Received event network-changed-45b96ccf-14b6-463c-8240-f354320958bd {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1987.688439] env[61868]: DEBUG nova.compute.manager [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service
nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Refreshing instance network info cache due to event network-changed-45b96ccf-14b6-463c-8240-f354320958bd. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1987.688687] env[61868]: DEBUG oslo_concurrency.lockutils [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service nova] Acquiring lock "refresh_cache-4a4e7cbe-dc5d-4643-b115-0142b5c978de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1987.688824] env[61868]: DEBUG oslo_concurrency.lockutils [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service nova] Acquired lock "refresh_cache-4a4e7cbe-dc5d-4643-b115-0142b5c978de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1987.688924] env[61868]: DEBUG nova.network.neutron [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Refreshing network info cache for port 45b96ccf-14b6-463c-8240-f354320958bd {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1987.904407] env[61868]: DEBUG nova.network.neutron [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Updated VIF entry in instance network info cache for port 45b96ccf-14b6-463c-8240-f354320958bd. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1987.904767] env[61868]: DEBUG nova.network.neutron [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service nova] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Updating instance_info_cache with network_info: [{"id": "45b96ccf-14b6-463c-8240-f354320958bd", "address": "fa:16:3e:bf:d9:bc", "network": {"id": "1018eb28-650c-4602-95a9-5e1826ff57fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-229121660-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "9eabc0f9c1604e90b373219843edfc8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45b96ccf-14", "ovs_interfaceid": "45b96ccf-14b6-463c-8240-f354320958bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1987.914167] env[61868]: DEBUG oslo_concurrency.lockutils [req-7d2a18ee-d962-4f92-8e34-123f8c1588af req-eb8c7e2b-c608-4f45-8d44-2e73803f0e34 service nova] Releasing lock "refresh_cache-4a4e7cbe-dc5d-4643-b115-0142b5c978de" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1990.351782] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1992.351840] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1992.352245] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}}
[ 2009.994385] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "8484d90b-13a3-41af-a88a-856a8770a4ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2009.994708] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2026.138709] env[61868]: WARNING oslo_vmware.rw_handles [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles response.begin()
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2026.138709] env[61868]: ERROR oslo_vmware.rw_handles
[ 2026.139414] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2026.141353] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2026.141643] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Copying Virtual Disk [datastore2] vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2026.141953] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fa14d05-b505-48d8-aef8-cda12ed6816b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2026.149956] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Waiting for the task: (returnval){
[ 2026.149956] env[61868]: value = "task-41188"
[ 2026.149956] env[61868]: _type = "Task"
[ 2026.149956] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2026.158041] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Task: {'id': task-41188, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2026.660307] env[61868]: DEBUG oslo_vmware.exceptions [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2026.660589] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 2026.661184] env[61868]: ERROR nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2026.661184] env[61868]: Faults: ['InvalidArgument']
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Traceback (most recent call last):
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] yield resources
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self.driver.spawn(context, instance, image_meta,
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self._fetch_image_if_missing(context, vi)
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] image_cache(vi, tmp_image_ds_loc)
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] vm_util.copy_virtual_disk(
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] session._wait_for_task(vmdk_copy_task)
[ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] return self.wait_for_task(task_ref) [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] return evt.wait() [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] result = hub.switch() [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] return self.greenlet.switch() [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self.f(*self.args, **self.kw) [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] raise exceptions.translate_fault(task_info.error) [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Faults: ['InvalidArgument'] [ 2026.661184] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] [ 2026.662052] env[61868]: INFO nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Terminating instance [ 2026.663105] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2026.663315] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2026.663562] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2cb3e7a-0d88-4726-9b71-2ba52fca2528 {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.665807] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2026.665996] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2026.666731] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3950bcff-e3fe-41b8-9277-143a22783032 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.674243] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2026.675199] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c127895a-e9ff-4430-a7d6-087893c52d73 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.676599] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2026.676776] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2026.677424] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bddbb6a-9edc-4911-ade2-d7510f46d1b6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.682610] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Waiting for the task: (returnval){ [ 2026.682610] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5298662f-2976-95b9-c11d-08b8452e6c0a" [ 2026.682610] env[61868]: _type = "Task" [ 2026.682610] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.695386] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5298662f-2976-95b9-c11d-08b8452e6c0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.744775] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2026.745038] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2026.745221] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Deleting the datastore file [datastore2] d69cb3f5-b385-432a-b562-87d0b1b0877b {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2026.745514] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88825152-21c6-4c2d-ac91-b86518239f68 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.751835] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Waiting for the task: (returnval){ [ 2026.751835] env[61868]: value = "task-41190" [ 2026.751835] env[61868]: _type = "Task" [ 2026.751835] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.760484] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Task: {'id': task-41190, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.193395] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2027.193764] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Creating directory with path [datastore2] vmware_temp/b26cc27a-d758-44d8-b1f3-44bf9bbfd504/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2027.193896] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b79fb37e-d4cc-434a-925b-5fabb4e6d942 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.205249] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Created directory with path [datastore2] vmware_temp/b26cc27a-d758-44d8-b1f3-44bf9bbfd504/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2027.205449] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Fetch image to [datastore2] vmware_temp/b26cc27a-d758-44d8-b1f3-44bf9bbfd504/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2027.205627] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/b26cc27a-d758-44d8-b1f3-44bf9bbfd504/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2027.206370] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103c7f7a-6794-4b8c-b30b-d6347d3af373 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.213176] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f67a3a-58aa-478d-981a-fe6acdfd33c2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.222224] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e69507-664e-4bc6-80c4-eea6ce5e0664 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.252847] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92119691-7c6e-4f6e-9c2f-843456689be5 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.264654] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a3047cdc-3709-462f-9c37-5109394b23cc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.266416] env[61868]: DEBUG oslo_vmware.api [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Task: {'id': task-41190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076432} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.266657] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2027.266840] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2027.267013] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2027.267187] env[61868]: INFO nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Took 0.60 seconds to destroy the instance on the hypervisor. 
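The failed spawn above follows oslo.vmware's invoke/wait pattern: vm_util.copy_virtual_disk issues VirtualDiskManager.CopyVirtualDisk_Task (recorded here as task-41188), and session._wait_for_task polls it until vCenter returns the InvalidArgument fault on fileType. A minimal sketch of that sequence under stated assumptions — the credentials and datacenter moref are hypothetical, the datastore paths are copied from the log, and this is not Nova's own helper:

    from oslo_vmware import api, exceptions, vim_util

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',   # vCenter host from the log
        'user', 'secret',                 # hypothetical credentials
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical moref id for the 'ha-datacenter' seen in the datastore URLs.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

    base = ('vmware_temp/18572b6c-d7e5-4f88-8927-c1ddebca84f7/'
            '790b1826-10c3-4b26-ad5d-ce8b36354025/')
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore2] ' + base + 'tmp-sparse.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore2] ' + base +
                 '790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk')
    try:
        # Polls the task server-side; each poll is a 'progress is 0%' line.
        session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # vCenter rejects the copy: 'A specified parameter was not correct:
        # fileType'; get_fault_class() finds no specific class, so the
        # generic fault carries fault_list ['InvalidArgument'].
        print(e.fault_list, str(e))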
[ 2027.269348] env[61868]: DEBUG nova.compute.claims [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2027.269515] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2027.269727] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2027.290895] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2027.356202] env[61868]: DEBUG nova.scheduler.client.report [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Refreshing inventories for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2027.369509] env[61868]: DEBUG nova.scheduler.client.report [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Updating ProviderTree inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2027.369739] env[61868]: DEBUG nova.compute.provider_tree [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2027.380093] env[61868]: DEBUG nova.scheduler.client.report [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Refreshing aggregate associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, aggregates: None {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2027.395397] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2027.395663] env[61868]: ERROR nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] Traceback (most recent call last): [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] result = getattr(controller, method)(*args, **kwargs) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._get(image_id) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] resp, body = self.http_client.get(url, headers=header) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.request(url, 'GET', **kwargs) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 
97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._handle_response(resp) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise exc.from_response(resp, resp.content) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] During handling of the above exception, another exception occurred: [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] Traceback (most recent call last): [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] yield resources [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self.driver.spawn(context, instance, image_meta, [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._fetch_image_if_missing(context, vi) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] image_fetch(context, vi, tmp_image_ds_loc) [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] images.fetch_image( [ 2027.395663] env[61868]: ERROR 
nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2027.395663] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] metadata = IMAGE_API.get(context, image_ref) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return session.show(context, image_id, [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] _reraise_translated_image_exception(image_id) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise new_exc.with_traceback(exc_trace) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] result = getattr(controller, method)(*args, **kwargs) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._get(image_id) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] resp, body = self.http_client.get(url, headers=header) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.request(url, 'GET', **kwargs) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2027.398023] 
env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._handle_response(resp) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise exc.from_response(resp, resp.content) [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] nova.exception.ImageNotAuthorized: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. [ 2027.398023] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2027.398023] env[61868]: INFO nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Terminating instance [ 2027.398023] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2027.398023] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2027.399865] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2027.399865] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquired lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2027.399865] env[61868]: DEBUG nova.network.neutron [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2027.400036] env[61868]: DEBUG nova.scheduler.client.report [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Refreshing trait associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2027.401948] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-e4f4b58d-b3a2-4f1f-8f96-1176f459b358 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.411273] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2027.411452] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2027.412680] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e56b05e-ea57-41cc-9079-fe44812269bc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.418618] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Waiting for the task: (returnval){ [ 2027.418618] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]520a35fd-d463-7535-beab-ea00741474d8" [ 2027.418618] env[61868]: _type = "Task" [ 2027.418618] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.439012] env[61868]: DEBUG nova.network.neutron [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2027.439012] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2027.439012] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Creating directory with path [datastore2] vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2027.439259] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-760ecd16-26b6-4654-8a2f-a97583418bca {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.460422] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Created directory with path [datastore2] vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2027.460624] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Fetch image to [datastore2] vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2027.460790] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2027.461600] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff24287a-561c-4859-8152-80677354f4d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.470636] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0f990d-050d-4338-b12c-00382f8f7523 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.473420] env[61868]: DEBUG nova.network.neutron [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.482061] env[61868]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8816a986-23f8-4b37-a78e-038d6ca21970 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.488509] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Releasing lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2027.488879] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2027.489071] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2027.490703] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793dbdd0-6fa1-480a-b7ce-58b6bcc46e63 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.521978] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb1f8ae-a296-48dc-abd1-293305a24b9b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.526727] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2027.527381] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22b27527-24c8-4cb0-be33-84699c1bb73a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.530828] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c5e44b8f-052b-433d-8a8c-d4130e5ea7b3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.555814] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2027.566961] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2027.567191] env[61868]: DEBUG 
nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2027.567400] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Deleting the datastore file [datastore2] 97840d8b-90ee-432e-988a-30548b61381b {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2027.567617] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aef42be0-af76-4240-9dea-48e18b131453 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.574016] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Waiting for the task: (returnval){ [ 2027.574016] env[61868]: value = "task-41192" [ 2027.574016] env[61868]: _type = "Task" [ 2027.574016] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.584984] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Task: {'id': task-41192, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.617423] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13d5e0b-2a9f-494f-9fee-963bff0cc1dd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.626114] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccfe16e-024c-442a-a167-6dee77aece05 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.664225] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990e8ebd-7e77-4a03-b9d9-3d743df8aee7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.675804] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1596025e-b40e-4a3e-b9b2-6e1629c4f24b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.689761] env[61868]: DEBUG nova.compute.provider_tree [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.698448] env[61868]: DEBUG nova.scheduler.client.report [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Inventory has not changed for provider 
6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2027.706095] env[61868]: DEBUG oslo_vmware.rw_handles [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2027.760094] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.490s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2027.760675] env[61868]: ERROR nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2027.760675] env[61868]: Faults: ['InvalidArgument'] [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Traceback (most recent call last): [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self.driver.spawn(context, instance, image_meta, [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self._fetch_image_if_missing(context, vi) [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] image_cache(vi, tmp_image_ds_loc) [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: 
d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] vm_util.copy_virtual_disk( [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] session._wait_for_task(vmdk_copy_task) [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] return self.wait_for_task(task_ref) [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] return evt.wait() [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] result = hub.switch() [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] return self.greenlet.switch() [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] self.f(*self.args, **self.kw) [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] raise exceptions.translate_fault(task_info.error) [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Faults: ['InvalidArgument'] [ 2027.760675] env[61868]: ERROR nova.compute.manager [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] [ 2027.761511] env[61868]: DEBUG nova.compute.utils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2027.764159] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 
tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Build of instance d69cb3f5-b385-432a-b562-87d0b1b0877b was re-scheduled: A specified parameter was not correct: fileType [ 2027.764159] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2027.764575] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2027.764754] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2027.764925] env[61868]: DEBUG nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2027.765091] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2027.767306] env[61868]: DEBUG oslo_vmware.rw_handles [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2027.767480] env[61868]: DEBUG oslo_vmware.rw_handles [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2028.035877] env[61868]: DEBUG nova.network.neutron [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.051046] env[61868]: INFO nova.compute.manager [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Took 0.29 seconds to deallocate network for instance. 
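The traceback above ends in oslo_vmware's task poller translating the vCenter task error into a VimFaultException ("A specified parameter was not correct: fileType", fault 'InvalidArgument'), which Nova answers by re-scheduling the build. A minimal sketch of how such an exception surfaces from a task wait and how the fault names can be inspected; `session` and `copy_task` are placeholders for objects created elsewhere, not names from this log:

from oslo_vmware import exceptions as vexc

def wait_for_copy(session, copy_task):
    try:
        # Polls the copy task; raises once vCenter reports a task error.
        return session.wait_for_task(copy_task)
    except vexc.VimFaultException as e:
        # fault_list names the vSphere faults (here ['InvalidArgument']);
        # str(e) carries the detail text quoted in the log.
        print(e.fault_list, str(e))
        raise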
[ 2028.084876] env[61868]: DEBUG oslo_vmware.api [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Task: {'id': task-41192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033685} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.085136] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2028.085322] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2028.085496] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2028.085669] env[61868]: INFO nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Took 0.60 seconds to destroy the instance on the hypervisor. [ 2028.085913] env[61868]: DEBUG oslo.service.loopingcall [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.086123] env[61868]: DEBUG nova.compute.manager [-] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2028.088884] env[61868]: DEBUG nova.compute.claims [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2028.089049] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2028.089269] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.158492] env[61868]: INFO nova.scheduler.client.report [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Deleted allocations for instance d69cb3f5-b385-432a-b562-87d0b1b0877b [ 2028.182683] env[61868]: DEBUG oslo_concurrency.lockutils [None req-aeb1acbf-26da-4ebc-9565-e513c4168e3e tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 691.946s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.184135] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 494.280s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.184474] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Acquiring lock "d69cb3f5-b385-432a-b562-87d0b1b0877b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2028.184806] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.185040] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d 
tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.189439] env[61868]: INFO nova.compute.manager [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Terminating instance [ 2028.192019] env[61868]: DEBUG nova.compute.manager [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2028.192286] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2028.192608] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6b45911-8c59-4761-9dec-31c9a5ff0bfd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.199484] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2028.206424] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6ba79c-a58f-4e24-8d2f-71ad306612a8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.240871] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d69cb3f5-b385-432a-b562-87d0b1b0877b could not be found. [ 2028.241171] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2028.241420] env[61868]: INFO nova.compute.manager [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Took 0.05 seconds to destroy the instance on the hypervisor. 
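The WARNING above shows the destroy path tolerating an instance that is already gone from vCenter: InstanceNotFound is caught, logged, and teardown continues so the delete still succeeds. A minimal sketch of that log-and-continue pattern; the callables and the local exception class are hypothetical stand-ins, not Nova's actual signatures:

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_if_present(lookup_vm, destroy_vm):
    # lookup_vm stands in for the SearchIndex.FindAllByUuid lookup seen
    # above; destroy_vm for the hypervisor teardown.
    try:
        destroy_vm(lookup_vm())
    except InstanceNotFound:
        # Nothing on the backend to delete; treat it as success so network
        # deallocation and the resource-claim abort can still run.
        pass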
[ 2028.241707] env[61868]: DEBUG oslo.service.loopingcall [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.246931] env[61868]: DEBUG nova.compute.manager [-] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2028.247090] env[61868]: DEBUG nova.network.neutron [-] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2028.271917] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2028.280231] env[61868]: DEBUG nova.network.neutron [-] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.291436] env[61868]: INFO nova.compute.manager [-] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] Took 0.04 seconds to deallocate network for instance. [ 2028.322718] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53c3379-3224-480b-a08a-9f07bd936c6c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.332481] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f14618-9627-42c4-9de8-7de65684d7b9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.364389] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02c444c-fd56-4a92-be4a-675d19c52dab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.375089] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc3025d-789a-4a7e-a23a-cfc7884a7de5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.389954] env[61868]: DEBUG nova.compute.provider_tree [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2028.401913] env[61868]: DEBUG nova.scheduler.client.report [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2028.410579] env[61868]: DEBUG oslo_concurrency.lockutils [None req-a5b0a3c7-96df-4b0b-8f5c-93cc0eb74c8d tempest-ServerRescueNegativeTestJSON-901351471 tempest-ServerRescueNegativeTestJSON-901351471-project-member] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.226s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.412100] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 268.757s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.412335] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: d69cb3f5-b385-432a-b562-87d0b1b0877b] During sync_power_state the instance has a pending task (deleting). Skip. [ 2028.412525] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "d69cb3f5-b385-432a-b562-87d0b1b0877b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.419693] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.420456] env[61868]: ERROR nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. 
[ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] Traceback (most recent call last): [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] result = getattr(controller, method)(*args, **kwargs) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._get(image_id) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] resp, body = self.http_client.get(url, headers=header) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.request(url, 'GET', **kwargs) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._handle_response(resp) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise exc.from_response(resp, resp.content) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] During handling of the above exception, another exception occurred: [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] Traceback (most recent call last): [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self.driver.spawn(context, instance, image_meta, [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._fetch_image_if_missing(context, vi) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] image_fetch(context, vi, tmp_image_ds_loc) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] images.fetch_image( [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] metadata = IMAGE_API.get(context, image_ref) [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2028.420456] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return session.show(context, image_id, [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] _reraise_translated_image_exception(image_id) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise new_exc.with_traceback(exc_trace) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 
97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] result = getattr(controller, method)(*args, **kwargs) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._get(image_id) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] resp, body = self.http_client.get(url, headers=header) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.request(url, 'GET', **kwargs) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self._handle_response(resp) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise exc.from_response(resp, resp.content) [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] nova.exception.ImageNotAuthorized: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. [ 2028.421630] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2028.421630] env[61868]: DEBUG nova.compute.utils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. 
{{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2028.422639] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Build of instance 97840d8b-90ee-432e-988a-30548b61381b was re-scheduled: Not authorized for image 790b1826-10c3-4b26-ad5d-ce8b36354025. {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2028.423092] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2028.423313] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2028.423462] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquired lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2028.423621] env[61868]: DEBUG nova.network.neutron [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2028.425500] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.154s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.426879] env[61868]: INFO nova.compute.claims [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2028.451271] env[61868]: DEBUG nova.network.neutron [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2028.478447] env[61868]: DEBUG nova.network.neutron [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.487326] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Releasing lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2028.487545] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2028.487724] env[61868]: DEBUG nova.compute.manager [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Skipping network deallocation for instance since networking was not requested. {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2028.586761] env[61868]: INFO nova.scheduler.client.report [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Deleted allocations for instance 97840d8b-90ee-432e-988a-30548b61381b [ 2028.610081] env[61868]: DEBUG oslo_concurrency.lockutils [None req-5bcc06d0-7606-4a4f-8dda-5adeb21d510d tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "97840d8b-90ee-432e-988a-30548b61381b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.582s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.610243] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "97840d8b-90ee-432e-988a-30548b61381b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.856s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.610446] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "97840d8b-90ee-432e-988a-30548b61381b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2028.610644] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "97840d8b-90ee-432e-988a-30548b61381b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.610838] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "97840d8b-90ee-432e-988a-30548b61381b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.612933] env[61868]: INFO nova.compute.manager [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Terminating instance [ 2028.614564] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquiring lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2028.614715] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Acquired lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2028.614879] env[61868]: DEBUG nova.network.neutron [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2028.623334] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ec5615-48d9-4c16-be40-e968d5e741f7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.633110] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7108f4d7-c796-4288-b67e-b471479941a1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.664860] env[61868]: DEBUG nova.network.neutron [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2028.667189] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6179b9fd-9d03-4da1-9837-e2acbbfb065c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.676105] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c91488d-1f94-4aa9-8a65-1be764e9fae0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.693468] env[61868]: DEBUG nova.compute.provider_tree [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2028.701937] env[61868]: DEBUG nova.scheduler.client.report [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2028.706036] env[61868]: DEBUG nova.network.neutron [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.715824] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Releasing lock "refresh_cache-97840d8b-90ee-432e-988a-30548b61381b" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2028.716236] env[61868]: DEBUG nova.compute.manager [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2028.716477] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2028.717091] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-516128fb-5d83-4bc0-8102-f56e453501cd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.722929] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.297s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.723449] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2028.730721] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296ac4b1-6a65-42b0-a564-630db5db1e7d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.760585] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97840d8b-90ee-432e-988a-30548b61381b could not be found. [ 2028.760840] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2028.761023] env[61868]: INFO nova.compute.manager [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2028.761310] env[61868]: DEBUG oslo.service.loopingcall [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.763646] env[61868]: DEBUG nova.compute.utils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2028.765655] env[61868]: DEBUG nova.compute.manager [-] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2028.765768] env[61868]: DEBUG nova.network.neutron [-] [instance: 97840d8b-90ee-432e-988a-30548b61381b] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2028.767773] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2028.767936] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2028.779049] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2028.854900] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2028.857144] env[61868]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61868) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2028.857371] env[61868]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2028.857881] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-3cbafbee-0f06-4b78-9c90-13233bac37a6'] [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2028.857881] env[61868]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2028.857881] env[61868]: ERROR oslo.service.loopingcall [ 2028.859255] env[61868]: ERROR nova.compute.manager [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
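Both tracebacks above pass through the wrapper at nova/network/neutron.py:196, which proxies every neutronclient call and, at line 212, turns a 401 from the admin-token client into NeutronAdminCredentialConfigurationInvalid instead of retrying. A minimal sketch of that translation, reconstructed from the traceback rather than copied from Nova's source:

import functools

from neutronclient.common import exceptions as neutron_client_exc
from nova import exception

def translate_unauthorized(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except neutron_client_exc.Unauthorized:
            # A client built from the [neutron] credentials in nova.conf
            # should never see 401, so point at configuration instead of
            # retrying the request.
            raise exception.NeutronAdminCredentialConfigurationInvalid()
    return wrapper

Because the wrapper sits on every proxied method, even the paginated list_ports call inside deallocate_for_instance fails fast with this configuration error, which is exactly the failure chain logged above.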
[ 2028.872387] env[61868]: DEBUG nova.policy [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74f15b527146bb9bc726e54d220a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d5fac165e449d49cd6e9d9c7e9d116', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 2028.880136] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2028.880541] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2028.880831] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2028.881432] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2028.881710] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2028.881970] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2028.882342] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2028.882660] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2028.882974] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2028.883255] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2028.883552] env[61868]: DEBUG nova.virt.hardware [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2028.884918] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ce7a3f-2211-4792-8677-ac5d49f96ac9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.889908] env[61868]: ERROR nova.compute.manager [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
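Interleaved with the teardown failure, the ImagesTestJSON boot above logs Nova's CPU-topology selection: limits and preferences of 0 mean "unset", so the 65536 defaults apply, and 1 vCPU admits exactly one sockets:cores:threads combination. A toy re-creation of that enumeration (the combinatorics it logs, not Nova's `_get_possible_cpu_topologies` verbatim):

```python
# Enumerate sockets/cores/threads triples whose product equals the vCPU
# count, capped by the (here defaulted) per-dimension limits.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus // s, max_cores) + 1):
            t = vcpus // (s * c)
            if t <= max_threads and s * c * t == vcpus:
                found.append(VirtCPUTopology(s, c, t))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the single
# topology the log reports for the 1-vCPU m1.nano flavor
```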
[ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] Traceback (most recent call last): [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] ret = obj(*args, **kwargs) [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] exception_handler_v20(status_code, error_body) [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise client_exc(message=error_message, [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] Neutron server returns request_ids: ['req-3cbafbee-0f06-4b78-9c90-13233bac37a6'] [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] During handling of the above exception, another exception occurred: [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] Traceback (most recent call last): [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._delete_instance(context, instance, bdms) [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._shutdown_instance(context, instance, bdms) [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._try_deallocate_network(context, instance, requested_networks) [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] with excutils.save_and_reraise_exception(): [ 2028.889908] env[61868]: ERROR 
nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self.force_reraise() [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise self.value [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] _deallocate_network_with_retries() [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return evt.wait() [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] result = hub.switch() [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.greenlet.switch() [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] result = func(*self.args, **self.kw) [ 2028.889908] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] result = f(*args, **kwargs) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._deallocate_network( [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self.network_api.deallocate_for_instance( [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 
97840d8b-90ee-432e-988a-30548b61381b] data = neutron.list_ports(**search_opts) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] ret = obj(*args, **kwargs) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.list('ports', self.ports_path, retrieve_all, [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] ret = obj(*args, **kwargs) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] for r in self._pagination(collection, path, **params): [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] res = self.get(path, params=params) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] ret = obj(*args, **kwargs) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.retry_request("GET", action, body=body, [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] ret = obj(*args, **kwargs) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] return self.do_request(method, action, body=body, [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] ret = obj(*args, **kwargs) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] self._handle_fault_response(status_code, replybody, resp) [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2028.891078] env[61868]: ERROR nova.compute.manager [instance: 97840d8b-90ee-432e-988a-30548b61381b] [ 2028.898667] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ec31ad-91e3-49bc-aaae-c050aadd3992 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.921164] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Lock "97840d8b-90ee-432e-988a-30548b61381b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.311s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.922264] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "97840d8b-90ee-432e-988a-30548b61381b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 269.267s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2028.922470] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 97840d8b-90ee-432e-988a-30548b61381b] During sync_power_state the instance has a pending task (deleting). Skip. [ 2028.922646] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "97840d8b-90ee-432e-988a-30548b61381b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2028.986257] env[61868]: INFO nova.compute.manager [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] [instance: 97840d8b-90ee-432e-988a-30548b61381b] Successfully reverted task state from None on failure for instance. [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server [None req-0a028e6c-2499-4241-80c8-10cc1a9b556e tempest-ServerShowV247Test-720549136 tempest-ServerShowV247Test-720549136-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-3cbafbee-0f06-4b78-9c90-13233bac37a6'] [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 2028.990245] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server raise self.value [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.991785] env[61868]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2028.991785] env[61868]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2028.993451] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2028.993451] env[61868]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2028.993451] env[61868]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2028.993451] env[61868]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2028.993451] env[61868]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
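All three renderings of this failure (oslo.service.loopingcall, nova.compute.manager, oslo_messaging.rpc.server) bottom out in the same neutron.list_ports() call answered with HTTP 401 for req-3cbafbee-0f06-4b78-9c90-13233bac37a6, which points at the [neutron] service credentials in nova.conf rather than at the instance itself. One way to reproduce the check outside Nova; illustrative only, the endpoint and account values below are placeholders for this deployment's real ones:

```python
# Exercise the Neutron service credentials directly; a 401 here
# reproduces the Unauthorized above. Placeholder values throughout.
from keystoneauth1 import identity, session
from neutronclient.v2_0 import client

auth = identity.Password(
    auth_url='http://controller/identity/v3',  # placeholder endpoint
    username='nova',                           # [neutron] username
    password='...',                            # [neutron] password
    project_name='service',
    user_domain_name='Default',
    project_domain_name='Default',
)
neutron = client.Client(session=session.Session(auth=auth))
neutron.list_ports()  # raises neutronclient Unauthorized on bad creds
```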
[ 2028.993451] env[61868]: ERROR oslo_messaging.rpc.server [ 2029.149334] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Successfully created port: ab9d1411-8c68-4ab7-971d-7959c7c85a5b {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2029.714331] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Successfully updated port: ab9d1411-8c68-4ab7-971d-7959c7c85a5b {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2029.726871] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "refresh_cache-8484d90b-13a3-41af-a88a-856a8770a4ce" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2029.727010] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "refresh_cache-8484d90b-13a3-41af-a88a-856a8770a4ce" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2029.727160] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2029.769202] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2029.927652] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Updating instance_info_cache with network_info: [{"id": "ab9d1411-8c68-4ab7-971d-7959c7c85a5b", "address": "fa:16:3e:83:dc:e7", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab9d1411-8c", "ovs_interfaceid": "ab9d1411-8c68-4ab7-971d-7959c7c85a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2029.943001] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "refresh_cache-8484d90b-13a3-41af-a88a-856a8770a4ce" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2029.943325] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Instance network_info: |[{"id": "ab9d1411-8c68-4ab7-971d-7959c7c85a5b", "address": "fa:16:3e:83:dc:e7", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab9d1411-8c", "ovs_interfaceid": "ab9d1411-8c68-4ab7-971d-7959c7c85a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2029.943800] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f 
tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:dc:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab9d1411-8c68-4ab7-971d-7959c7c85a5b', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2029.951890] env[61868]: DEBUG oslo.service.loopingcall [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2029.952457] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2029.952709] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83f580d9-da79-4126-b91d-5738fec2cb02 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.973921] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2029.973921] env[61868]: value = "task-41193" [ 2029.973921] env[61868]: _type = "Task" [ 2029.973921] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.982602] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41193, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.074991] env[61868]: DEBUG nova.compute.manager [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Received event network-vif-plugged-ab9d1411-8c68-4ab7-971d-7959c7c85a5b {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2030.074991] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] Acquiring lock "8484d90b-13a3-41af-a88a-856a8770a4ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2030.075234] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2030.075488] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2030.075617] env[61868]: DEBUG nova.compute.manager [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] No waiting events found dispatching network-vif-plugged-ab9d1411-8c68-4ab7-971d-7959c7c85a5b {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2030.075715] env[61868]: WARNING nova.compute.manager [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Received unexpected event network-vif-plugged-ab9d1411-8c68-4ab7-971d-7959c7c85a5b for instance with vm_state building and task_state spawning. [ 2030.075898] env[61868]: DEBUG nova.compute.manager [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Received event network-changed-ab9d1411-8c68-4ab7-971d-7959c7c85a5b {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2030.076025] env[61868]: DEBUG nova.compute.manager [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Refreshing instance network info cache due to event network-changed-ab9d1411-8c68-4ab7-971d-7959c7c85a5b.
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2030.076218] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] Acquiring lock "refresh_cache-8484d90b-13a3-41af-a88a-856a8770a4ce" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2030.076355] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] Acquired lock "refresh_cache-8484d90b-13a3-41af-a88a-856a8770a4ce" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2030.076516] env[61868]: DEBUG nova.network.neutron [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Refreshing network info cache for port ab9d1411-8c68-4ab7-971d-7959c7c85a5b {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2030.327606] env[61868]: DEBUG nova.network.neutron [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Updated VIF entry in instance network info cache for port ab9d1411-8c68-4ab7-971d-7959c7c85a5b. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2030.328487] env[61868]: DEBUG nova.network.neutron [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Updating instance_info_cache with network_info: [{"id": "ab9d1411-8c68-4ab7-971d-7959c7c85a5b", "address": "fa:16:3e:83:dc:e7", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab9d1411-8c", "ovs_interfaceid": "ab9d1411-8c68-4ab7-971d-7959c7c85a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.341324] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2642342-3d84-4b53-8687-59878612c304 req-09a66205-fbdc-4985-b160-7e654d9c5326 service nova] Releasing lock "refresh_cache-8484d90b-13a3-41af-a88a-856a8770a4ce" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2030.484654] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41193, 'name': CreateVM_Task, 'duration_secs': 0.382967} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.485048] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2030.485739] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2030.486113] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2030.488994] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796c654d-d918-429d-bdb0-a85615ac04e6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.523670] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2030.524323] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5a8a8d9-c30a-41ca-8976-5719b7a92166 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.541508] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2030.541508] env[61868]: value = "task-41194" [ 2030.541508] env[61868]: _type = "Task" [ 2030.541508] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.555032] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41194, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.051247] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41194, 'name': ReconfigVM_Task, 'duration_secs': 0.107941} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.052424] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2031.052795] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.567s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2031.053174] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2031.053477] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2031.053898] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2031.054247] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd8b11e8-1a43-44ff-97a0-1789b5835400 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.058892] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2031.058892] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5293fb63-87d7-b62c-378e-fcda995aec9f" [ 2031.058892] env[61868]: _type = "Task" [ 2031.058892] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.066775] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5293fb63-87d7-b62c-378e-fcda995aec9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.570830] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2031.571151] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2031.571350] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2033.351139] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2034.358548] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.563100] env[61868]: DEBUG oslo_concurrency.lockutils [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2039.351333] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2039.351529] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2039.351709] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2039.372560] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Skipping network cache update for instance because it is Building.
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.372734] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.372877] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.372951] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.373073] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.373195] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.373317] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.373432] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.373545] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2039.373659] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2042.351416] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2042.351810] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61868) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2044.354185] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.351588] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.361793] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2046.362025] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2046.362188] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2046.362346] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2046.363440] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f21292-5ca0-48c0-9a32-7d05d53de7a6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.372234] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5926ee4c-d494-4866-ac51-a3bdaa58f6cb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.387310] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb653b2-0df1-4848-89c5-29ed90468f79 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.393969] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7d7e0d45-196b-4518-9989-3438ea9340b2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.427129] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181890MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2046.427304] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2046.427496] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2046.489644] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b1473dd0-5427-496c-a94c-5772635b229f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.489983] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.490231] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.490504] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.490756] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.490999] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.491253] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.491508] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.491747] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8484d90b-13a3-41af-a88a-856a8770a4ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2046.492075] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2046.492336] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2046.610606] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3857ba-025e-4f3e-83eb-d65d2aa2487b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.618555] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bbaf9c-95d2-425e-8fc1-c93167aa55b9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.648391] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25235f01-9a62-4f57-8081-c46ea13fb659 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.656278] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0286f886-3361-41b0-922b-0e1f429801bd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.669273] env[61868]: DEBUG nova.compute.provider_tree 
[None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2046.677555] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2046.695193] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2046.695387] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.268s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2047.695479] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2047.695822] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.351668] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.351923] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.352090] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2048.361359] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] There are 0 instances to clean {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2051.362287] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.352410] env[61868]: DEBUG 
oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.352799] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2068.382756] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "803c1598-d083-405d-80a1-6adf3fbd2f96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2068.383185] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2068.409032] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2068.463952] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2068.464232] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2068.465725] env[61868]: INFO nova.compute.claims [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2068.637781] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4be886-a0d5-408f-9dbd-c79dfe3cdd8c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.646157] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddebeca9-2e38-483d-b045-5977d3e4d61c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.676638] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2533f6db-95da-4d29-8cda-267446b89e55 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.684084] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc22175d-38b5-4802-a98a-1a8e24a4cfb9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.697332] env[61868]: DEBUG nova.compute.provider_tree [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.706285] env[61868]: DEBUG nova.scheduler.client.report [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2068.724341] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.260s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2068.724856] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2068.761748] env[61868]: DEBUG nova.compute.utils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2068.763041] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Allocating IP information in the background. 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2068.763214] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2068.776796] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2068.814132] env[61868]: DEBUG nova.policy [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42563ff3e832401b9c7a69c9a3feebaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a33cb95d89ad4e1c8aacebb2a9e16009', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 2068.847458] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2068.869153] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2068.869466] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2068.869686] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2068.869985] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2068.870182] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2068.870370] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2068.870615] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2068.870834] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2068.871044] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 
tempest-ServersTestJSON-1722207346-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2068.871293] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2068.871515] env[61868]: DEBUG nova.virt.hardware [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2068.872402] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d34e260-4687-4652-8086-1a1dc2547ca8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.880498] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f056a51e-f125-4a05-a25f-26e5b4eea2e9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.084086] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Successfully created port: 8e860d11-eedf-4814-b317-e4a4afb9eed2 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2069.554513] env[61868]: DEBUG nova.compute.manager [req-26f3d377-1453-435b-98cc-0b3cfce4a1e8 req-a6213ab9-7106-4735-aefc-cd5fd681daa2 service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Received event network-vif-plugged-8e860d11-eedf-4814-b317-e4a4afb9eed2 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2069.554958] env[61868]: DEBUG oslo_concurrency.lockutils [req-26f3d377-1453-435b-98cc-0b3cfce4a1e8 req-a6213ab9-7106-4735-aefc-cd5fd681daa2 service nova] Acquiring lock "803c1598-d083-405d-80a1-6adf3fbd2f96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2069.554958] env[61868]: DEBUG oslo_concurrency.lockutils [req-26f3d377-1453-435b-98cc-0b3cfce4a1e8 req-a6213ab9-7106-4735-aefc-cd5fd681daa2 service nova] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2069.555128] env[61868]: DEBUG oslo_concurrency.lockutils [req-26f3d377-1453-435b-98cc-0b3cfce4a1e8 req-a6213ab9-7106-4735-aefc-cd5fd681daa2 service nova] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.555271] env[61868]: DEBUG nova.compute.manager [req-26f3d377-1453-435b-98cc-0b3cfce4a1e8 req-a6213ab9-7106-4735-aefc-cd5fd681daa2 service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] 
No waiting events found dispatching network-vif-plugged-8e860d11-eedf-4814-b317-e4a4afb9eed2 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2069.555441] env[61868]: WARNING nova.compute.manager [req-26f3d377-1453-435b-98cc-0b3cfce4a1e8 req-a6213ab9-7106-4735-aefc-cd5fd681daa2 service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Received unexpected event network-vif-plugged-8e860d11-eedf-4814-b317-e4a4afb9eed2 for instance with vm_state building and task_state spawning. [ 2069.628771] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Successfully updated port: 8e860d11-eedf-4814-b317-e4a4afb9eed2 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2069.643005] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "refresh_cache-803c1598-d083-405d-80a1-6adf3fbd2f96" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2069.643154] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "refresh_cache-803c1598-d083-405d-80a1-6adf3fbd2f96" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2069.643312] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2069.679045] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2069.827010] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Updating instance_info_cache with network_info: [{"id": "8e860d11-eedf-4814-b317-e4a4afb9eed2", "address": "fa:16:3e:b9:88:fe", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e860d11-ee", "ovs_interfaceid": "8e860d11-eedf-4814-b317-e4a4afb9eed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.844454] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "refresh_cache-803c1598-d083-405d-80a1-6adf3fbd2f96" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2069.844804] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Instance network_info: |[{"id": "8e860d11-eedf-4814-b317-e4a4afb9eed2", "address": "fa:16:3e:b9:88:fe", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e860d11-ee", "ovs_interfaceid": "8e860d11-eedf-4814-b317-e4a4afb9eed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2069.845276] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e 
tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:88:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba6157eb-73cb-428a-9f46-99081165d7eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e860d11-eedf-4814-b317-e4a4afb9eed2', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2069.853097] env[61868]: DEBUG oslo.service.loopingcall [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2069.853666] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2069.853943] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4199e77-cab4-41e6-b385-909938386be5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.874503] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2069.874503] env[61868]: value = "task-41195" [ 2069.874503] env[61868]: _type = "Task" [ 2069.874503] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.884261] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41195, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.387493] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41195, 'name': CreateVM_Task, 'duration_secs': 0.319935} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.387693] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2070.388320] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2070.388562] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2070.391417] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc80f544-9c96-4e91-89ef-b082dc5ef75c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.424036] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Reconfiguring VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2070.424411] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-515c6264-1ca3-4763-bed3-05ace73f634a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.443949] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2070.443949] env[61868]: value = "task-41196" [ 2070.443949] env[61868]: _type = "Task" [ 2070.443949] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.454057] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41196, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.954726] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41196, 'name': ReconfigVM_Task, 'duration_secs': 0.123356} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.955095] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Reconfigured VM instance to enable vnc on port - 5908 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2070.955151] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.567s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.955394] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2070.955545] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2070.955895] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2070.956186] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2dce70f-30eb-4cff-8e62-47f1732ac247 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.961703] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2070.961703] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f13281-7aa4-ee60-316a-c02380348452" [ 2070.961703] env[61868]: _type = "Task" [ 2070.961703] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.969755] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52f13281-7aa4-ee60-316a-c02380348452, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.472293] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2071.472584] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2071.472800] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2071.582387] env[61868]: DEBUG nova.compute.manager [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Received event network-changed-8e860d11-eedf-4814-b317-e4a4afb9eed2 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2071.582551] env[61868]: DEBUG nova.compute.manager [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Refreshing instance network info cache due to event network-changed-8e860d11-eedf-4814-b317-e4a4afb9eed2. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2071.582766] env[61868]: DEBUG oslo_concurrency.lockutils [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] Acquiring lock "refresh_cache-803c1598-d083-405d-80a1-6adf3fbd2f96" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2071.582908] env[61868]: DEBUG oslo_concurrency.lockutils [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] Acquired lock "refresh_cache-803c1598-d083-405d-80a1-6adf3fbd2f96" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2071.583069] env[61868]: DEBUG nova.network.neutron [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Refreshing network info cache for port 8e860d11-eedf-4814-b317-e4a4afb9eed2 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2071.807974] env[61868]: DEBUG nova.network.neutron [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Updated VIF entry in instance network info cache for port 8e860d11-eedf-4814-b317-e4a4afb9eed2. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2071.808324] env[61868]: DEBUG nova.network.neutron [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Updating instance_info_cache with network_info: [{"id": "8e860d11-eedf-4814-b317-e4a4afb9eed2", "address": "fa:16:3e:b9:88:fe", "network": {"id": "af866156-816c-4427-a004-8bf63f54c0c2", "bridge": "br-int", "label": "tempest-ServersTestJSON-821930294-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "a33cb95d89ad4e1c8aacebb2a9e16009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba6157eb-73cb-428a-9f46-99081165d7eb", "external-id": "nsx-vlan-transportzone-463", "segmentation_id": 463, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e860d11-ee", "ovs_interfaceid": "8e860d11-eedf-4814-b317-e4a4afb9eed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.819619] env[61868]: DEBUG oslo_concurrency.lockutils [req-f090361c-acf4-4211-9aa9-05baf90d9c1b req-39cbe3df-ab28-4bcc-979b-9e6bc4d3eadc service nova] Releasing lock "refresh_cache-803c1598-d083-405d-80a1-6adf3fbd2f96" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2076.494465] env[61868]: WARNING oslo_vmware.rw_handles [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2076.494465] env[61868]: ERROR oslo_vmware.rw_handles [ 2076.495205] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-db473365-0f04-40ef-aaa0-66e555ff696f 
tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2076.496785] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2076.497023] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Copying Virtual Disk [datastore2] vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/843f8484-9cb1-45f1-be1f-d14ab61bf770/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2076.497311] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91079cb4-0472-4577-a9ca-3aac79d1be4f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.506319] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Waiting for the task: (returnval){ [ 2076.506319] env[61868]: value = "task-41197" [ 2076.506319] env[61868]: _type = "Task" [ 2076.506319] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.514625] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': task-41197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.016528] env[61868]: DEBUG oslo_vmware.exceptions [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2077.016769] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2077.017338] env[61868]: ERROR nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.017338] env[61868]: Faults: ['InvalidArgument'] [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] Traceback (most recent call last): [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] yield resources [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self.driver.spawn(context, instance, image_meta, [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self._fetch_image_if_missing(context, vi) [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] image_cache(vi, tmp_image_ds_loc) [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] vm_util.copy_virtual_disk( [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] session._wait_for_task(vmdk_copy_task) [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] return self.wait_for_task(task_ref) [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] return evt.wait() [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] result = hub.switch() [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] return self.greenlet.switch() [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self.f(*self.args, **self.kw) [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] raise exceptions.translate_fault(task_info.error) [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] Faults: ['InvalidArgument'] [ 2077.017338] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] [ 2077.018366] env[61868]: INFO nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Terminating instance [ 2077.019234] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2077.019441] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.019682] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a717a9a-ebb8-4adb-9f49-b729eb91bfd4 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.023407] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2077.023603] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2077.024334] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1647b2b0-9bd6-4ade-8ca9-f0b520de9aae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.030989] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2077.030989] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c1cb386-ced1-4e1a-b669-9069868d5784 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.033118] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.033324] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2077.034281] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93ad3b29-1a77-4992-b97e-483cbff3c40e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.038956] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Waiting for the task: (returnval){ [ 2077.038956] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]521d9131-f3a0-b4d0-06b1-4ad9bfe928c2" [ 2077.038956] env[61868]: _type = "Task" [ 2077.038956] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.046147] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]521d9131-f3a0-b4d0-06b1-4ad9bfe928c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.099866] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2077.100425] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2077.100769] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Deleting the datastore file [datastore2] b1473dd0-5427-496c-a94c-5772635b229f {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2077.101184] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77c251dd-476b-4862-9d2f-c9c5e3ba5dee {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.108401] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Waiting for the task: (returnval){ [ 2077.108401] env[61868]: value = "task-41199" [ 2077.108401] env[61868]: _type = "Task" [ 2077.108401] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.118578] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': task-41199, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.549966] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2077.550337] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Creating directory with path [datastore2] vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.550390] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cca88419-5dca-485a-a9d5-ce088c38b3c6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.561793] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Created directory with path [datastore2] vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.562103] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Fetch image to [datastore2] vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2077.562391] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2077.563139] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019b8560-6e33-4837-bf8c-37c7bf8f6624 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.569845] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edf3786-6bb9-4bd2-8e37-dbc2bfea13b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.578802] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a94413b-29e7-4dc7-ae49-7dbb14fa49ba {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.609441] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7371b205-d9f3-4b5c-80af-ba7edf516644 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.621282] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0f9063e0-7d39-4f8d-a959-c5ba5ad9b155 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.623075] env[61868]: DEBUG oslo_vmware.api [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Task: {'id': task-41199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074884} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.623319] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2077.623510] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2077.623680] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2077.623856] env[61868]: INFO nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Took 0.60 seconds to destroy the instance on the hypervisor. 
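Note: the fileType/InvalidArgument traceback at the top of this excerpt and the DeleteDatastoreFile_Task wait just above both go through the same oslo.vmware call-and-poll path: Nova invokes a vCenter task method, then blocks in wait_for_task while a looping call polls the task state. A minimal sketch of that path, assuming an already-connected oslo_vmware.api.VMwareAPISession named session; the function name, datacenter ref, and disk paths are illustrative placeholders, not taken from this log:

    # Sketch only: issue a VirtualDiskManager copy and wait on the task the
    # way the "Waiting for the task ... to complete" lines above do.
    from oslo_vmware import exceptions as vexc

    def copy_cached_disk(session, dc_ref, source_path, dest_path):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=source_path,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_path)
        try:
            # wait_for_task polls in a looping call until the task reaches
            # 'success', otherwise it raises the translated server fault.
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # For the failure logged above, e.fault_list would contain
            # 'InvalidArgument' and the message would be "A specified
            # parameter was not correct: fileType".
            raise

This mirrors the chain visible in the traceback (vm_util.copy_virtual_disk -> session._wait_for_task -> oslo_vmware.api._poll_task -> exceptions.translate_fault), but it is a sketch, not the Nova implementation.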
[ 2077.625945] env[61868]: DEBUG nova.compute.claims [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2077.626494] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2077.626494] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2077.649588] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2077.702626] env[61868]: DEBUG oslo_vmware.rw_handles [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2077.758872] env[61868]: DEBUG oslo_vmware.rw_handles [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2077.759150] env[61868]: DEBUG oslo_vmware.rw_handles [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2077.851926] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b60491a-b83b-4196-b208-0f69fb5c476d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.859947] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9937b44b-7d91-445b-bc1f-19f44943aff8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.889578] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa672c7-53d7-46bf-befe-9fce026138f9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.897431] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454e1be6-b840-4285-a0fc-a09aecccbc05 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.910766] env[61868]: DEBUG nova.compute.provider_tree [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2077.918984] env[61868]: DEBUG nova.scheduler.client.report [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2077.935085] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.309s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2077.935624] env[61868]: ERROR nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.935624] env[61868]: Faults: ['InvalidArgument'] [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] Traceback (most recent call last): [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2077.935624] env[61868]: 
ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self.driver.spawn(context, instance, image_meta, [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self._fetch_image_if_missing(context, vi) [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] image_cache(vi, tmp_image_ds_loc) [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] vm_util.copy_virtual_disk( [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] session._wait_for_task(vmdk_copy_task) [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] return self.wait_for_task(task_ref) [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] return evt.wait() [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] result = hub.switch() [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] return self.greenlet.switch() [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] self.f(*self.args, **self.kw) [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] raise exceptions.translate_fault(task_info.error) [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] Faults: ['InvalidArgument'] [ 2077.935624] env[61868]: ERROR nova.compute.manager [instance: b1473dd0-5427-496c-a94c-5772635b229f] [ 2077.936547] env[61868]: DEBUG nova.compute.utils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2077.937903] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Build of instance b1473dd0-5427-496c-a94c-5772635b229f was re-scheduled: A specified parameter was not correct: fileType [ 2077.937903] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2077.938331] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2077.938537] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2077.939195] env[61868]: DEBUG nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2077.939195] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2078.225899] env[61868]: DEBUG nova.network.neutron [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.240807] env[61868]: INFO nova.compute.manager [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Took 0.30 seconds to deallocate network for instance. [ 2078.369510] env[61868]: INFO nova.scheduler.client.report [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Deleted allocations for instance b1473dd0-5427-496c-a94c-5772635b229f [ 2078.395905] env[61868]: DEBUG oslo_concurrency.lockutils [None req-db473365-0f04-40ef-aaa0-66e555ff696f tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "b1473dd0-5427-496c-a94c-5772635b229f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 634.781s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2078.395905] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "b1473dd0-5427-496c-a94c-5772635b229f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 438.009s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2078.395905] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Acquiring lock "b1473dd0-5427-496c-a94c-5772635b229f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2078.395905] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "b1473dd0-5427-496c-a94c-5772635b229f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2078.395905] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "b1473dd0-5427-496c-a94c-5772635b229f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2078.396886] env[61868]: INFO nova.compute.manager [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Terminating instance [ 2078.400245] env[61868]: DEBUG nova.compute.manager [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2078.400475] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2078.400727] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5425ebcf-cb89-40bd-9fb9-294852f1a679 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.414219] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b88193c-dc61-495e-b287-e437e36b7584 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.443038] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b1473dd0-5427-496c-a94c-5772635b229f could not be found. [ 2078.443264] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2078.443446] env[61868]: INFO nova.compute.manager [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2078.443694] env[61868]: DEBUG oslo.service.loopingcall [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2078.443940] env[61868]: DEBUG nova.compute.manager [-] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2078.444050] env[61868]: DEBUG nova.network.neutron [-] [instance: b1473dd0-5427-496c-a94c-5772635b229f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2078.472421] env[61868]: DEBUG nova.network.neutron [-] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.482815] env[61868]: INFO nova.compute.manager [-] [instance: b1473dd0-5427-496c-a94c-5772635b229f] Took 0.04 seconds to deallocate network for instance. [ 2078.567908] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2078.568321] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2078.579912] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2078.598483] env[61868]: DEBUG oslo_concurrency.lockutils [None req-0f9b464c-e123-4fd5-8ae5-5ea586ba268e tempest-MultipleCreateTestJSON-1001388970 tempest-MultipleCreateTestJSON-1001388970-project-member] Lock "b1473dd0-5427-496c-a94c-5772635b229f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.206s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2078.599337] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "b1473dd0-5427-496c-a94c-5772635b229f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 318.944s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2078.599529] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b1473dd0-5427-496c-a94c-5772635b229f] During sync_power_state the instance has a pending task (deleting). Skip.
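Note: the recurring Acquiring lock / acquired (waited Ns) / "released" (held Ns) triplets above are emitted by oslo.concurrency's synchronized decorator, and the <locals> segments in names such as ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance are simply the __qualname__ of a function defined inside a method. A minimal, hypothetical reproduction (the lock name and both functions are made up, and DEBUG-level logging must be configured for the lines to appear):

    # Sketch: nesting the locked function the way Nova does is what yields
    # a "<locals>" qualname in the lockutils DEBUG lines.
    import logging
    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    def build_and_run_instance(instance_uuid):
        @lockutils.synchronized(instance_uuid)
        def _locked_do_build_and_run_instance():
            pass  # critical section; lockutils logs waited/held durations

        _locked_do_build_and_run_instance()

    build_and_run_instance('demo-instance-uuid')

The "waited 438.009s" above is this mechanism at work: do_terminate_instance queued behind the per-instance build lock for the whole rebuild attempt and only ran once _locked_do_build_and_run_instance released it.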
[ 2078.604778] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "b1473dd0-5427-496c-a94c-5772635b229f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.005s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2078.633019] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2078.633315] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2078.634911] env[61868]: INFO nova.compute.claims [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2078.827890] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4672e768-7fde-4887-8b69-7ca0b51b0e34 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.836407] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a4bf6d-7b78-402d-ab64-0b9d44f295fe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.867786] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac96de6d-2057-4829-b39d-75efedabaca9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.875538] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bfd3df-d304-4d3f-bfd2-9a018b9e6146 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.888809] env[61868]: DEBUG nova.compute.provider_tree [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2078.898006] env[61868]: DEBUG nova.scheduler.client.report [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1,
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2078.914178] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2078.914678] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2078.953468] env[61868]: DEBUG nova.compute.utils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2078.954747] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2078.954917] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2078.965699] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2079.001995] env[61868]: DEBUG nova.policy [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '424b508614194ac2ad15e8cb62f2d041', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f518980782c4dc5ac6efe31af19af16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 2079.041373] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2079.064503] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=<?>,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=<?>,size=50659328,status='active',tags=<?>,updated_at=2024-02-13T12:42:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2079.064827] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2079.065036] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2079.065270] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2079.065543] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2079.065763] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2079.066024] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2079.066235] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2079.066457] env[61868]: DEBUG nova.virt.hardware [None
req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2079.066668] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2079.066893] env[61868]: DEBUG nova.virt.hardware [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2079.067804] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bea9a89-d41b-416f-be06-7a4d575442df {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.076568] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03556393-eb47-4d9a-8f1c-be0742a1995a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.467154] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Successfully created port: 7c9939e9-a11a-441c-b322-8a22016e0ae4 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2079.981571] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Successfully updated port: 7c9939e9-a11a-441c-b322-8a22016e0ae4 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2079.998628] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "refresh_cache-e97ea5c4-163c-4870-9744-3f20cf57f53f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2079.998795] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "refresh_cache-e97ea5c4-163c-4870-9744-3f20cf57f53f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2079.998939] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2080.039829] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2080.192090] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Updating instance_info_cache with network_info: [{"id": "7c9939e9-a11a-441c-b322-8a22016e0ae4", "address": "fa:16:3e:b8:e4:6e", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c9939e9-a1", "ovs_interfaceid": "7c9939e9-a11a-441c-b322-8a22016e0ae4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.207727] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "refresh_cache-e97ea5c4-163c-4870-9744-3f20cf57f53f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2080.207727] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Instance network_info: |[{"id": "7c9939e9-a11a-441c-b322-8a22016e0ae4", "address": "fa:16:3e:b8:e4:6e", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c9939e9-a1", "ovs_interfaceid": "7c9939e9-a11a-441c-b322-8a22016e0ae4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2080.207992] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:e4:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c9939e9-a11a-441c-b322-8a22016e0ae4', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2080.215467] env[61868]: DEBUG oslo.service.loopingcall [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2080.216017] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2080.216254] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f68229ed-858b-4e07-accb-c087d9d90feb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.238593] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2080.238593] env[61868]: value = "task-41200" [ 2080.238593] env[61868]: _type = "Task" [ 2080.238593] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.247500] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41200, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.276573] env[61868]: DEBUG nova.compute.manager [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Received event network-vif-plugged-7c9939e9-a11a-441c-b322-8a22016e0ae4 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2080.276836] env[61868]: DEBUG oslo_concurrency.lockutils [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] Acquiring lock "e97ea5c4-163c-4870-9744-3f20cf57f53f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2080.277055] env[61868]: DEBUG oslo_concurrency.lockutils [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2080.277242] env[61868]: DEBUG oslo_concurrency.lockutils [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2080.277603] env[61868]: DEBUG nova.compute.manager [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] No waiting events found dispatching network-vif-plugged-7c9939e9-a11a-441c-b322-8a22016e0ae4 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2080.277603] env[61868]: WARNING nova.compute.manager [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Received unexpected event network-vif-plugged-7c9939e9-a11a-441c-b322-8a22016e0ae4 for instance with vm_state building and task_state spawning. [ 2080.277768] env[61868]: DEBUG nova.compute.manager [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Received event network-changed-7c9939e9-a11a-441c-b322-8a22016e0ae4 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2080.278052] env[61868]: DEBUG nova.compute.manager [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Refreshing instance network info cache due to event network-changed-7c9939e9-a11a-441c-b322-8a22016e0ae4.
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2080.278242] env[61868]: DEBUG oslo_concurrency.lockutils [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] Acquiring lock "refresh_cache-e97ea5c4-163c-4870-9744-3f20cf57f53f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2080.278509] env[61868]: DEBUG oslo_concurrency.lockutils [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] Acquired lock "refresh_cache-e97ea5c4-163c-4870-9744-3f20cf57f53f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2080.278643] env[61868]: DEBUG nova.network.neutron [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Refreshing network info cache for port 7c9939e9-a11a-441c-b322-8a22016e0ae4 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2080.533135] env[61868]: DEBUG nova.network.neutron [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Updated VIF entry in instance network info cache for port 7c9939e9-a11a-441c-b322-8a22016e0ae4. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2080.533549] env[61868]: DEBUG nova.network.neutron [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Updating instance_info_cache with network_info: [{"id": "7c9939e9-a11a-441c-b322-8a22016e0ae4", "address": "fa:16:3e:b8:e4:6e", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c9939e9-a1", "ovs_interfaceid": "7c9939e9-a11a-441c-b322-8a22016e0ae4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.544455] env[61868]: DEBUG oslo_concurrency.lockutils [req-b53b0f77-e049-4d84-baef-206f4fb53648 req-839ac467-52bf-4016-ad7b-b288225ca6ec service nova] Releasing lock "refresh_cache-e97ea5c4-163c-4870-9744-3f20cf57f53f" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2080.749365] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41200, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.249645] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41200, 'name': CreateVM_Task, 'duration_secs': 0.517383} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.249988] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2081.250535] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2081.250771] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2081.253772] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828d16e6-705e-45a3-842e-64288c727976 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.285734] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Reconfiguring VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2081.286043] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61d2e7f5-584a-4227-95ff-7d29b5865613 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.301589] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2081.301589] env[61868]: value = "task-41201" [ 2081.301589] env[61868]: _type = "Task" [ 2081.301589] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.309575] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41201, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.811761] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41201, 'name': ReconfigVM_Task, 'duration_secs': 0.10826} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.812055] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Reconfigured VM instance to enable vnc on port - 5901 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2081.812306] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.562s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2081.812569] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2081.812711] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2081.813123] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2081.813399] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b71f8bd7-942c-43a7-9d26-0aca9ea44e36 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.818962] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2081.818962] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]520b3f39-88fa-3a96-b3ad-0712be4c8d5d" [ 2081.818962] env[61868]: _type = "Task" [ 2081.818962] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.829454] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]520b3f39-88fa-3a96-b3ad-0712be4c8d5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.328971] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2082.329315] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2082.329598] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2095.352517] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.352390] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.352843] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2100.352843] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2100.373417] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.373575] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.373708] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.373860] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.373990] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.374200] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.374355] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.374482] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.374604] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.374722] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2100.374841] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2105.370429] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.346716] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.368787] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2108.352016] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2108.362096] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2108.362324] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2108.362493] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2108.362647] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2108.363731] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf40c2f9-2d2c-4dfc-b2ee-5862afbba10c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.372385] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fe69ff-6895-4052-9804-589691452282 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.386466] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2b3d89-9785-4de5-89ca-907ce6018288 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.394066] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff2112b-22b6-4f03-88cc-af38704ee7fd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.426841] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181938MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2108.427034] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2108.427212] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2108.493178] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.493336] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.493485] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.493607] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.493724] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.493838] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.493952] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.494063] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8484d90b-13a3-41af-a88a-856a8770a4ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.494175] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.494284] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.494465] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2108.495223] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2108.634264] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd8b252-1a1d-46e2-995b-c93a3beae806 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.642946] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686378f9-fc9f-47f4-a70c-78ce5db8614b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.672712] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fa1d29-1a3a-4224-869b-f22ca059ba88 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.680430] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d05db5-5a2f-4a1b-bdb8-5e367443e96c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.696403] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2108.707403] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2108.726313] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2108.726507] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.299s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2109.726676] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2110.351641] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2113.351181] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2113.351529] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2113.351618] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2126.510137] env[61868]: WARNING oslo_vmware.rw_handles [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2126.510137] env[61868]: ERROR oslo_vmware.rw_handles [ 2126.510832] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2126.512602] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 
tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2126.512855] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Copying Virtual Disk [datastore2] vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/12b94c69-da0a-44e0-9406-7577a9e38a14/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2126.513149] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ce65934-ecb0-409e-9d60-7533507c67df {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.520375] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Waiting for the task: (returnval){ [ 2126.520375] env[61868]: value = "task-41202" [ 2126.520375] env[61868]: _type = "Task" [ 2126.520375] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.529845] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Task: {'id': task-41202, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.031663] env[61868]: DEBUG oslo_vmware.exceptions [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2127.032031] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2127.032639] env[61868]: ERROR nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2127.032639] env[61868]: Faults: ['InvalidArgument'] [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Traceback (most recent call last): [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] yield resources [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self.driver.spawn(context, instance, image_meta, [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self._fetch_image_if_missing(context, vi) [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] image_cache(vi, tmp_image_ds_loc) [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] vm_util.copy_virtual_disk( [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] session._wait_for_task(vmdk_copy_task) [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] return self.wait_for_task(task_ref) [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] return evt.wait() [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] result = hub.switch() [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] return self.greenlet.switch() [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self.f(*self.args, **self.kw) [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] raise exceptions.translate_fault(task_info.error) [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Faults: ['InvalidArgument'] [ 2127.032639] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] [ 2127.033559] env[61868]: INFO nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Terminating instance [ 2127.035660] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2127.035660] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2127.035660] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2a29bacf-5367-4e4d-b657-aeb7fadd4670 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.038361] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2127.038569] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2127.039447] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d876b89a-d296-495d-ac5c-8f0fb3b67a4e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.049147] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2127.049433] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c42b026-4397-4d41-9e17-1e4c99e7d9ce {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.051813] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2127.052086] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2127.053378] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e7c8880-5699-4b40-90bf-afe4a0c57c1e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.060056] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 2127.060056] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52decb52-3169-3e06-aec2-e69ad7487874" [ 2127.060056] env[61868]: _type = "Task" [ 2127.060056] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.070668] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52decb52-3169-3e06-aec2-e69ad7487874, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.118477] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2127.118825] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2127.118909] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Deleting the datastore file [datastore2] c6c6e502-a2aa-4f9c-be05-bf6b50078abf {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2127.119178] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b7898c4-6107-4428-a4ea-5ade47366588 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.126601] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Waiting for the task: (returnval){ [ 2127.126601] env[61868]: value = "task-41204" [ 2127.126601] env[61868]: _type = "Task" [ 2127.126601] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.134852] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Task: {'id': task-41204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.570336] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2127.570718] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2127.570829] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea4c652b-296c-4277-8db3-0589aa86d69f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.582437] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2127.582634] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Fetch image to [datastore2] vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2127.582804] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2127.583577] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2bea95-14d2-4858-adee-618d3d7beaca {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.590257] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d785dcd5-38a6-4160-a7c9-2d192d887885 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.599285] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967deef7-8ffd-41fb-af49-840860b170f9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.631996] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-91ec509f-4b8e-4d50-a50f-5e9d661fc387 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.639970] env[61868]: DEBUG oslo_vmware.api [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Task: {'id': task-41204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12523} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.641483] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2127.641677] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2127.641852] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2127.642028] env[61868]: INFO nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Took 0.60 seconds to destroy the instance on the hypervisor. 
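The spawn failure above follows the poll-and-translate pattern the trace shows at oslo_vmware/api.py:434, 444 and 448: wait_for_task polls the vCenter task while it reports progress, and when the task ends in an error state the fault is translated and re-raised. That is how the InvalidArgument fault on the fileType parameter of CopyVirtualDisk_Task becomes the VimFaultException that aborts the spawn and triggers the teardown recorded here (UnregisterVM, then DeleteDatastoreFile_Task). A minimal Python sketch of that pattern, assuming a hypothetical poll_task_info callable (not an oslo.vmware API):

import time

class VimFaultException(Exception):
    # Stand-in for oslo_vmware.exceptions.VimFaultException.
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(poll_task_info, interval=0.5):
    # poll_task_info() is assumed to return (state, progress, fault),
    # mirroring the fields of a vSphere TaskInfo object.
    while True:
        state, progress, fault = poll_task_info()
        if state in ('queued', 'running'):
            # Corresponds to the "progress is N%" DEBUG lines in the trace.
            time.sleep(interval)
            continue
        if state == 'success':
            return
        # Error state: translate the vCenter fault and raise; in the trace
        # the fault list is ['InvalidArgument'] and the message is
        # "A specified parameter was not correct: fileType".
        raise VimFaultException([fault],
                                'A specified parameter was not correct: fileType')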
[ 2127.643995] env[61868]: DEBUG nova.compute.claims [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2127.644176] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2127.644388] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2127.646844] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-58a8cdb0-920a-47e0-bbce-33cd40429385 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.670327] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2127.723054] env[61868]: DEBUG oslo_vmware.rw_handles [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2127.781002] env[61868]: DEBUG oslo_vmware.rw_handles [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2127.781223] env[61868]: DEBUG oslo_vmware.rw_handles [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2127.858535] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abaa5de-d613-4b1b-b7b8-19a8e70e434c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.866346] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5a9e15-836d-41ea-862c-e07fecd2c7ab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.895318] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df455f36-ae67-4049-9919-406350aa9195 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.902296] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fd60df-8987-4aa5-9013-f2cd9106e020 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.914924] env[61868]: DEBUG nova.compute.provider_tree [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2127.923476] env[61868]: DEBUG nova.scheduler.client.report [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2127.940977] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2127.941598] env[61868]: ERROR nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2127.941598] env[61868]: Faults: ['InvalidArgument'] [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Traceback (most recent call last): [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2127.941598] 
env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self.driver.spawn(context, instance, image_meta, [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self._fetch_image_if_missing(context, vi) [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] image_cache(vi, tmp_image_ds_loc) [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] vm_util.copy_virtual_disk( [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] session._wait_for_task(vmdk_copy_task) [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] return self.wait_for_task(task_ref) [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] return evt.wait() [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] result = hub.switch() [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] return self.greenlet.switch() [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] self.f(*self.args, **self.kw) [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] raise exceptions.translate_fault(task_info.error) [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Faults: ['InvalidArgument'] [ 2127.941598] env[61868]: ERROR nova.compute.manager [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] [ 2127.942556] env[61868]: DEBUG nova.compute.utils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2127.943918] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Build of instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf was re-scheduled: A specified parameter was not correct: fileType [ 2127.943918] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2127.944299] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2127.944475] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2127.944648] env[61868]: DEBUG nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2127.944838] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2128.214851] env[61868]: DEBUG nova.network.neutron [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2128.228627] env[61868]: INFO nova.compute.manager [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Took 0.28 seconds to deallocate network for instance. [ 2128.338772] env[61868]: INFO nova.scheduler.client.report [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Deleted allocations for instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf [ 2128.361394] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f78e0d9f-7c95-445e-a5af-94c1d1276ed1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 543.492s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2128.361394] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 368.705s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2128.361394] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2128.361606] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2128.361950] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 347.235s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2128.362188] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Acquiring lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2128.362517] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2128.362728] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2128.365099] env[61868]: INFO nova.compute.manager [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Terminating instance [ 2128.367261] env[61868]: DEBUG nova.compute.manager [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2128.367582] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2128.368277] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fc3ff47-8673-4d23-a890-5700312dfdac {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.381384] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c548ee-8e04-4884-84cb-5389746400f2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.408886] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6c6e502-a2aa-4f9c-be05-bf6b50078abf could not be found. [ 2128.409122] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2128.409304] env[61868]: INFO nova.compute.manager [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2128.409576] env[61868]: DEBUG oslo.service.loopingcall [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2128.409822] env[61868]: DEBUG nova.compute.manager [-] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2128.409924] env[61868]: DEBUG nova.network.neutron [-] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2128.436296] env[61868]: DEBUG nova.network.neutron [-] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2128.445834] env[61868]: INFO nova.compute.manager [-] [instance: c6c6e502-a2aa-4f9c-be05-bf6b50078abf] Took 0.04 seconds to deallocate network for instance. 
[ 2128.538248] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7ea71951-c068-45ed-be7c-44f636d745b1 tempest-ServersNegativeTestJSON-1897410890 tempest-ServersNegativeTestJSON-1897410890-project-member] Lock "c6c6e502-a2aa-4f9c-be05-bf6b50078abf" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.176s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2157.352424] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2162.351275] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2162.351579] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2162.351616] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2162.371135] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.371348] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.371451] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.371580] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.371705] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.371829] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.371971] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.372120] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.372244] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2162.372367] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2166.368710] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.352044] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.352436] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.362911] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2169.363201] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2169.363290] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2169.363451] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
2169.364652] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2609b02f-3fb0-41d8-83e4-a53f154183d1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.373757] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e156760-5ec0-4369-826f-f99d4e96bf63 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.389886] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5f8e03-ce30-45e6-866a-2c02b9c111aa {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.396972] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1b441e-2c97-45e7-84f7-9e19bf773671 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.426092] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181940MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2169.426250] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2169.426446] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2169.489696] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 36be944d-04bc-45cd-8019-173437f8ffa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.489862] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.489992] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.490116] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.490273] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.490406] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.490527] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8484d90b-13a3-41af-a88a-856a8770a4ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.490645] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.490759] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2169.490945] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2169.491084] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2169.606893] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453869aa-8dae-4e79-993b-0e6501d9e311 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.615108] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af70c9a4-4c6f-45c7-877a-b11b3d93d979 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.646065] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef123ba9-0f1b-4e1f-8647-c05f1b5557f1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.653879] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798fba17-1535-404b-9c50-83d3da557d29 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.667164] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2169.676480] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2169.693820] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2169.694056] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.268s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2170.692716] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2172.350899] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2173.351747] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2173.352066] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2173.352162] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2174.863717] env[61868]: WARNING oslo_vmware.rw_handles [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2174.863717] env[61868]: ERROR oslo_vmware.rw_handles [ 2174.864523] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2174.866025] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 
tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2174.866265] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Copying Virtual Disk [datastore2] vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/e713ea5c-77cf-4618-b13f-568e591ecb22/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2174.866545] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53cce413-0d19-4d4d-ac67-3075c22a8edc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.874990] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 2174.874990] env[61868]: value = "task-41205" [ 2174.874990] env[61868]: _type = "Task" [ 2174.874990] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.883339] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.385459] env[61868]: DEBUG oslo_vmware.exceptions [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2175.385718] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2175.386313] env[61868]: ERROR nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2175.386313] env[61868]: Faults: ['InvalidArgument'] [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Traceback (most recent call last): [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] yield resources [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self.driver.spawn(context, instance, image_meta, [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self._fetch_image_if_missing(context, vi) [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] image_cache(vi, tmp_image_ds_loc) [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] vm_util.copy_virtual_disk( [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] session._wait_for_task(vmdk_copy_task) [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] return self.wait_for_task(task_ref) [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] return evt.wait() [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] result = hub.switch() [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] return self.greenlet.switch() [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self.f(*self.args, **self.kw) [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] raise exceptions.translate_fault(task_info.error) [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Faults: ['InvalidArgument'] [ 2175.386313] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] [ 2175.389259] env[61868]: INFO nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Terminating instance [ 2175.389259] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2175.389259] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2175.389259] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55a1fd9b-56c3-4a30-86bb-7ec30c5a85c1 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.390916] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2175.391105] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2175.391832] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff151a7-a9b2-403e-8a25-b8ee29f72f93 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.398807] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2175.399018] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51f5c3d2-0d73-48ae-a12f-ecb4497db21a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.401417] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2175.401583] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2175.402531] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04b1918d-4298-4e2b-93f5-e8c9cbdc6f9f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.407202] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2175.407202] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]529fa93b-6e42-b151-37db-810b7cc9bc10" [ 2175.407202] env[61868]: _type = "Task" [ 2175.407202] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.414518] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]529fa93b-6e42-b151-37db-810b7cc9bc10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.467576] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2175.467822] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2175.467975] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleting the datastore file [datastore2] 36be944d-04bc-45cd-8019-173437f8ffa5 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2175.468277] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a1a2e65-4d18-4bbc-905d-0903b946b00d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.475703] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 2175.475703] env[61868]: value = "task-41207" [ 2175.475703] env[61868]: _type = "Task" [ 2175.475703] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.484087] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41207, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.918222] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2175.918610] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2175.918750] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55384ee2-13fa-4903-af04-8ce35b51331c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.930785] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2175.930978] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Fetch image to [datastore2] vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2175.931150] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2175.931935] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ad20da-df1d-4b53-9a06-6f5963e0ebfe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.938513] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb0e396-3b08-43fa-a4d8-68c27763830a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.947754] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b0205e-10ba-4aa4-bd59-ab999c4f23bb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.981702] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34aa7b52-94d8-462e-b99d-7c966b85eced {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.989352] env[61868]: DEBUG oslo_vmware.api [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093932} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.990783] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2175.990971] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2175.991148] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2175.991356] env[61868]: INFO nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2175.993124] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-22eeaa0e-603f-4fed-81c2-f2fbeb59be7c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.995091] env[61868]: DEBUG nova.compute.claims [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2175.995263] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2175.995507] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2176.019729] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2176.074944] env[61868]: DEBUG oslo_vmware.rw_handles [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2176.159159] env[61868]: DEBUG oslo_vmware.rw_handles [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2176.159507] env[61868]: DEBUG oslo_vmware.rw_handles [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2176.237835] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c875ac-f579-4edf-ac36-50a538ee4603 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.246168] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7b8a5c-f68d-4742-9afc-e6f9b4bf4905 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.275652] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d38c82-08d1-4fcc-b274-e951d50edb8c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.283478] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4eb100-63b0-4642-8b5e-4caab76325e2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.297957] env[61868]: DEBUG nova.compute.provider_tree [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2176.306990] env[61868]: DEBUG nova.scheduler.client.report [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2176.323577] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2176.324146] env[61868]: ERROR nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2176.324146] env[61868]: Faults: ['InvalidArgument'] [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Traceback (most recent call last): [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2176.324146] 
env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self.driver.spawn(context, instance, image_meta, [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self._fetch_image_if_missing(context, vi) [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] image_cache(vi, tmp_image_ds_loc) [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] vm_util.copy_virtual_disk( [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] session._wait_for_task(vmdk_copy_task) [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] return self.wait_for_task(task_ref) [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] return evt.wait() [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] result = hub.switch() [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] return self.greenlet.switch() [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] self.f(*self.args, **self.kw) [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] raise exceptions.translate_fault(task_info.error) [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Faults: ['InvalidArgument'] [ 2176.324146] env[61868]: ERROR nova.compute.manager [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] [ 2176.325165] env[61868]: DEBUG nova.compute.utils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2176.326330] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Build of instance 36be944d-04bc-45cd-8019-173437f8ffa5 was re-scheduled: A specified parameter was not correct: fileType [ 2176.326330] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2176.326864] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2176.326989] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2176.327162] env[61868]: DEBUG nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2176.327328] env[61868]: DEBUG nova.network.neutron [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2176.591637] env[61868]: DEBUG nova.network.neutron [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.605807] env[61868]: INFO nova.compute.manager [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Took 0.28 seconds to deallocate network for instance. [ 2176.701743] env[61868]: INFO nova.scheduler.client.report [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted allocations for instance 36be944d-04bc-45cd-8019-173437f8ffa5 [ 2176.725639] env[61868]: DEBUG oslo_concurrency.lockutils [None req-eecad92e-0e7a-48ab-9d44-020c1433ced7 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "36be944d-04bc-45cd-8019-173437f8ffa5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 584.302s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2176.725816] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "36be944d-04bc-45cd-8019-173437f8ffa5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 417.070s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2176.725914] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] During sync_power_state the instance has a pending task (spawning). Skip. 
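[editor's note] The lock bookkeeping in the records above ("acquired by ... waited 417.070s", "released ... held 584.302s") is emitted by oslo.concurrency's lockutils wrapper (the "inner" frames in the log). A minimal sketch of the usage pattern follows, assuming oslo.concurrency is installed; the lock name and function body are illustrative, not Nova's actual code.

    from oslo_concurrency import lockutils

    # Illustrative sketch of the pattern behind the "Lock ... acquired/released"
    # records: lockutils.synchronized serializes callers on a named lock, and
    # the wrapping code logs how long each caller waited for the lock and how
    # long it held it. The lock name and body here are hypothetical.
    @lockutils.synchronized('36be944d-04bc-45cd-8019-173437f8ffa5')
    def query_driver_power_state_and_sync():
        # Runs only while no other thread holds the per-instance lock; a large
        # "waited" value (417s above) means another operation, such as the
        # 584s build attempt, held the lock for that long first.
        pass
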
[ 2176.726084] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "36be944d-04bc-45cd-8019-173437f8ffa5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2176.726325] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "36be944d-04bc-45cd-8019-173437f8ffa5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 389.107s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2176.726546] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "36be944d-04bc-45cd-8019-173437f8ffa5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2176.726753] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "36be944d-04bc-45cd-8019-173437f8ffa5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2176.726917] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "36be944d-04bc-45cd-8019-173437f8ffa5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2176.729132] env[61868]: INFO nova.compute.manager [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Terminating instance [ 2176.731026] env[61868]: DEBUG nova.compute.manager [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2176.731296] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2176.731791] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3547d6b5-63ff-47c7-a83f-e3b64d083071 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.741510] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2244ec4e-5040-4f22-83dd-526cb4f92760 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.771325] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 36be944d-04bc-45cd-8019-173437f8ffa5 could not be found. [ 2176.771614] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2176.771843] env[61868]: INFO nova.compute.manager [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2176.772196] env[61868]: DEBUG oslo.service.loopingcall [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2176.772472] env[61868]: DEBUG nova.compute.manager [-] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2176.772608] env[61868]: DEBUG nova.network.neutron [-] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2176.797103] env[61868]: DEBUG nova.network.neutron [-] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.805383] env[61868]: INFO nova.compute.manager [-] [instance: 36be944d-04bc-45cd-8019-173437f8ffa5] Took 0.03 seconds to deallocate network for instance. 
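[editor's note] The WARNING/"Instance destroyed" pair above shows the destroy path treating a missing backing VM as already deleted and continuing with network deallocation. A minimal, self-contained sketch of that control flow, with hypothetical names throughout (only the exception semantics and the log wording are taken from the records above):

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound (hypothetical)."""

    def lookup_vm_by_uuid(instance_uuid):
        # Stub that simulates the lookup miss seen in the log.
        raise InstanceNotFound(instance_uuid)

    def destroy_instance(instance_uuid):
        # Sketch of the idempotent-destroy flow: a missing backing VM is
        # logged as a warning and swallowed, so the rest of the teardown
        # (network deallocation, lock release) still runs.
        try:
            vm_ref = lookup_vm_by_uuid(instance_uuid)
            # ... unregister the VM and delete its datastore files here ...
        except InstanceNotFound:
            print('Instance does not exist on backend: %s' % instance_uuid)
        # Network deallocation happens regardless, matching the log above.

    destroy_instance('36be944d-04bc-45cd-8019-173437f8ffa5')
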
[ 2176.895606] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ffbf63ab-7ea2-4f61-a3ae-bed6eaba7915 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "36be944d-04bc-45cd-8019-173437f8ffa5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.169s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2180.326242] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2205.546172] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "8484d90b-13a3-41af-a88a-856a8770a4ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2217.352763] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2221.206944] env[61868]: WARNING oslo_vmware.rw_handles [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2221.206944] env[61868]: ERROR oslo_vmware.rw_handles [ 2221.207615] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to 
vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2221.209701] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2221.209973] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Copying Virtual Disk [datastore2] vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/21a2bca2-c7ac-4602-98f8-0649b8e281a0/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2221.210306] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c001256b-12cd-4c83-a355-80ae1fd9684c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.219419] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2221.219419] env[61868]: value = "task-41208" [ 2221.219419] env[61868]: _type = "Task" [ 2221.219419] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.227986] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.729509] env[61868]: DEBUG oslo_vmware.exceptions [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2221.729770] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2221.730331] env[61868]: ERROR nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2221.730331] env[61868]: Faults: ['InvalidArgument'] [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Traceback (most recent call last): [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] yield resources [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] self.driver.spawn(context, instance, image_meta, [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] self._fetch_image_if_missing(context, vi) [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] image_cache(vi, tmp_image_ds_loc) [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] vm_util.copy_virtual_disk( [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] session._wait_for_task(vmdk_copy_task) [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] return self.wait_for_task(task_ref) [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] return evt.wait() [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] result = hub.switch() [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] return self.greenlet.switch() [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] self.f(*self.args, **self.kw) [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] raise exceptions.translate_fault(task_info.error) [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Faults: ['InvalidArgument'] [ 2221.730331] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] [ 2221.731446] env[61868]: INFO nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Terminating instance [ 2221.732223] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2221.732489] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2221.732737] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e09dd3a-d22e-40d5-a260-ff10664024f6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2221.734898] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2221.735165] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2221.735883] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18c9e0f-1f08-4992-ac1b-3cb795c14f6d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.742826] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2221.743070] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6585563c-c63a-41ec-8422-69efdb38494e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.745339] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2221.745517] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2221.746520] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-406296c8-1a1f-4887-8a85-5f3aa8e29b0b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.751209] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2221.751209] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]522f2107-c719-32ad-a413-a752782884f5" [ 2221.751209] env[61868]: _type = "Task" [ 2221.751209] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.759034] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]522f2107-c719-32ad-a413-a752782884f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.816372] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2221.816595] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2221.816820] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleting the datastore file [datastore2] e453b684-a54b-46b3-b9ea-4ab9352965f7 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2221.817061] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b440ab6-325b-417a-80b0-354acedbae10 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.823771] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2221.823771] env[61868]: value = "task-41210" [ 2221.823771] env[61868]: _type = "Task" [ 2221.823771] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.833147] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41210, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.261495] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2222.261794] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore2] vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2222.261976] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-150d82a7-2bdf-4a84-9cbf-7b459b54d213 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.273491] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore2] vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2222.273689] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Fetch image to [datastore2] vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2222.273863] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2222.274598] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba51d69-1e43-4187-85c1-0cdf7322a2c6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.281802] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eedd4fc-a54f-4615-9bad-314cb1c13c8b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.290806] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161c33a9-90c6-4197-8eb5-c86814873740 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.320934] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576f5aeb-2538-48c9-a6c2-8c9d0538993d {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.329359] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-391a9fcc-e1e0-4b1a-991f-80c06d7345a0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.334051] env[61868]: DEBUG oslo_vmware.api [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075246} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.334636] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2222.334832] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2222.335016] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2222.335189] env[61868]: INFO nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Took 0.60 seconds to destroy the instance on the hypervisor. 
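[editor's note] The "Waiting for the task", "progress is 0%.", and "completed successfully ... duration_secs" records above all come from oslo.vmware's task poller. A generic, self-contained sketch of that poll-until-done loop follows; the real implementation in oslo_vmware.api runs inside a looping call and uses exceptions.translate_fault (as the tracebacks show), so the names below beyond VimFaultException are assumptions.

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task leaves its running states, mirroring the
        # "progress is N%" records in the log.
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Mirrors "raise exceptions.translate_fault(task_info.error)"
                # in the tracebacks: an InvalidArgument task fault surfaces as
                # VimFaultException("A specified parameter was not correct: ...").
                raise VimFaultException(info['error'])
            time.sleep(interval)

    # Example: a task that succeeds on the second poll.
    states = iter([{'state': 'running'}, {'state': 'success', 'result': 'ok'}])
    print(wait_for_task(lambda: next(states), interval=0))
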
[ 2222.337313] env[61868]: DEBUG nova.compute.claims [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2222.337521] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2222.337770] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2222.351747] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.351918] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2222.352067] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2222.356318] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2222.371371] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.371559] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.371695] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.371823] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.372049] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.372179] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.372458] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.372458] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2222.372584] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2222.417194] env[61868]: DEBUG oslo_vmware.rw_handles [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2222.480635] env[61868]: DEBUG oslo_vmware.rw_handles [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2222.480848] env[61868]: DEBUG oslo_vmware.rw_handles [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2222.550753] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bb627d-8daa-424b-b16e-ab5038c96283 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.558867] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1335cc4-288f-4833-ac98-cf421f222c97 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.590643] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00692fa6-e5fc-464b-aba2-4760301e995d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.598669] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5444979c-40d6-4f08-a85d-7fcd9d9413c8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.612301] env[61868]: DEBUG nova.compute.provider_tree [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2222.620500] env[61868]: DEBUG nova.scheduler.client.report [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2222.636354] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2222.636970] env[61868]: ERROR nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2222.636970] env[61868]: Faults: ['InvalidArgument'] [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Traceback (most recent call last): [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: 
e453b684-a54b-46b3-b9ea-4ab9352965f7] self.driver.spawn(context, instance, image_meta, [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] self._fetch_image_if_missing(context, vi) [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] image_cache(vi, tmp_image_ds_loc) [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] vm_util.copy_virtual_disk( [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] session._wait_for_task(vmdk_copy_task) [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] return self.wait_for_task(task_ref) [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] return evt.wait() [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] result = hub.switch() [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] return self.greenlet.switch() [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] self.f(*self.args, **self.kw) [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] raise exceptions.translate_fault(task_info.error) [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Faults: ['InvalidArgument'] [ 2222.636970] env[61868]: ERROR nova.compute.manager [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] [ 2222.637930] env[61868]: DEBUG nova.compute.utils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2222.639456] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Build of instance e453b684-a54b-46b3-b9ea-4ab9352965f7 was re-scheduled: A specified parameter was not correct: fileType [ 2222.639456] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2222.639842] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2222.640038] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2222.640221] env[61868]: DEBUG nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2222.640393] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2222.901642] env[61868]: DEBUG nova.network.neutron [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2222.914563] env[61868]: INFO nova.compute.manager [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Took 0.27 seconds to deallocate network for instance. [ 2223.013399] env[61868]: INFO nova.scheduler.client.report [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted allocations for instance e453b684-a54b-46b3-b9ea-4ab9352965f7 [ 2223.032098] env[61868]: DEBUG oslo_concurrency.lockutils [None req-11126721-00e1-467d-8944-084a1fed6530 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 606.514s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2223.032420] env[61868]: DEBUG oslo_concurrency.lockutils [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 410.573s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2223.033130] env[61868]: DEBUG oslo_concurrency.lockutils [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "e453b684-a54b-46b3-b9ea-4ab9352965f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2223.033130] env[61868]: DEBUG oslo_concurrency.lockutils [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2223.033130] env[61868]: 
DEBUG oslo_concurrency.lockutils [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2223.034947] env[61868]: INFO nova.compute.manager [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Terminating instance [ 2223.037065] env[61868]: DEBUG nova.compute.manager [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2223.037320] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2223.037970] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a16cde15-f753-46cd-a5c8-f9816b9049b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.047443] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d06285f-632c-4ea2-88d8-9b5c3c9b8360 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.080979] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e453b684-a54b-46b3-b9ea-4ab9352965f7 could not be found. [ 2223.081194] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2223.081392] env[61868]: INFO nova.compute.manager [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2223.081636] env[61868]: DEBUG oslo.service.loopingcall [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2223.081949] env[61868]: DEBUG nova.compute.manager [-] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2223.082066] env[61868]: DEBUG nova.network.neutron [-] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2223.110519] env[61868]: DEBUG nova.network.neutron [-] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2223.120417] env[61868]: INFO nova.compute.manager [-] [instance: e453b684-a54b-46b3-b9ea-4ab9352965f7] Took 0.04 seconds to deallocate network for instance. [ 2223.225117] env[61868]: DEBUG oslo_concurrency.lockutils [None req-de9eb150-bf26-4cc0-8680-1db67e0672f3 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "e453b684-a54b-46b3-b9ea-4ab9352965f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.193s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2226.369837] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2228.346153] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.351103] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.352070] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.352561] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.364717] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2231.364956] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2231.365125] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2231.365282] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2231.366444] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e5cdcd-4df8-4d0a-b29b-df371a792b51 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.375749] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ed7b2e-495f-4539-bd80-256da6d29de5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.390364] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6209864-f279-4c2c-ac28-618695692fb2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.397376] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1c38d9-c80a-45b4-bb98-38b358378a9c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.428111] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181944MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2231.428356] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2231.428479] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2231.485853] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.486012] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.486126] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.486248] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.486369] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8484d90b-13a3-41af-a88a-856a8770a4ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.486487] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.486604] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.486819] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2231.487040] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2231.586191] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c40b26a-f115-4e8b-8bea-c9ad8468bb94 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.594400] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6e5e6b-7ce2-41b3-af1a-85318c41c52f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.626047] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2ec32b-0044-4f43-9e4c-1b6c47229803 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.634044] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddb7f21-e3b7-42f6-b3d1-14620ced4169 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.647878] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2231.656213] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2231.676552] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2231.676770] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.248s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2232.675895] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.351681] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.351947] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.352109] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2263.508206] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "803c1598-d083-405d-80a1-6adf3fbd2f96" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2270.171110] env[61868]: WARNING oslo_vmware.rw_handles [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2270.171110] env[61868]: ERROR oslo_vmware.rw_handles [ 2270.171745] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2270.173466] env[61868]: 
DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2270.173720] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Copying Virtual Disk [datastore2] vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/0835f768-4109-4e9f-a223-4622fe708fcc/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2270.174003] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1f44a40-052d-411f-a8d6-baf5c362bf38 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.182350] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2270.182350] env[61868]: value = "task-41211" [ 2270.182350] env[61868]: _type = "Task" [ 2270.182350] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.191348] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.692852] env[61868]: DEBUG oslo_vmware.exceptions [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2270.693131] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2270.693690] env[61868]: ERROR nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2270.693690] env[61868]: Faults: ['InvalidArgument'] [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Traceback (most recent call last): [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] yield resources [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self.driver.spawn(context, instance, image_meta, [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self._fetch_image_if_missing(context, vi) [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] image_cache(vi, tmp_image_ds_loc) [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] vm_util.copy_virtual_disk( [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] session._wait_for_task(vmdk_copy_task) [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] return self.wait_for_task(task_ref) [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] return evt.wait() [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] result = hub.switch() [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] return self.greenlet.switch() [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self.f(*self.args, **self.kw) [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] raise exceptions.translate_fault(task_info.error) [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Faults: ['InvalidArgument'] [ 2270.693690] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] [ 2270.694706] env[61868]: INFO nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Terminating instance [ 2270.696278] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2270.696604] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2270.696960] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-827f5c3e-3178-4dac-b365-032b1c408886 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.699769] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2270.699958] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2270.700738] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4ba713-49ff-4195-bdcf-d4274d5bd2b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.708556] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2270.709532] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18335fab-90e4-4360-9ad7-5e80a968110f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.710975] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2270.711149] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2270.711856] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bee9574-f311-4ce9-82a2-8b9c9c69b1e3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.717049] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2270.717049] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5245fb10-2232-9255-4d74-c577925acb27" [ 2270.717049] env[61868]: _type = "Task" [ 2270.717049] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.724658] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5245fb10-2232-9255-4d74-c577925acb27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.775709] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2270.775910] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2270.776121] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleting the datastore file [datastore2] ad095fd9-abd0-4c75-8d7c-10dcebc2caee {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2270.776410] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-116bc74c-9025-4daf-a02d-4168dc124e1a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.783085] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2270.783085] env[61868]: value = "task-41213" [ 2270.783085] env[61868]: _type = "Task" [ 2270.783085] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.791518] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41213, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.227664] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2271.227988] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2271.228253] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f1923f3-004e-4d88-82ac-5b17115d6782 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.240865] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2271.241088] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Fetch image to [datastore2] vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2271.241306] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2271.242095] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bae33d4-9422-492d-815b-c00e5b8ded2e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.249702] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908b567e-922d-4077-b2eb-155a343887cb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.259370] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708a288d-f4d7-4389-bbdf-6d383b2b221b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.295567] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0564e3a0-43ca-490c-bae1-a62b3716b3d1 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.302875] env[61868]: DEBUG oslo_vmware.api [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41213, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073171} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.304475] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2271.304664] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2271.304837] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2271.305013] env[61868]: INFO nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2271.306763] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7bc5562e-4b91-49f3-9a9f-14372045244f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.308722] env[61868]: DEBUG nova.compute.claims [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2271.308895] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2271.309112] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2271.332640] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2271.383673] env[61868]: DEBUG oslo_vmware.rw_handles [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2271.439066] env[61868]: DEBUG oslo_vmware.rw_handles [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2271.439194] env[61868]: DEBUG oslo_vmware.rw_handles [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2271.494322] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4d9ecb-d624-483c-8909-86b6d36e21ef {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.502252] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eaf0b98-8bf5-46dc-858c-f43eb8f7dfe2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.531355] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd56c56-fe98-463d-b39a-6680f0c12488 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.539082] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0120e38a-ac35-4b84-bb98-f11a9802f33f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.553556] env[61868]: DEBUG nova.compute.provider_tree [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2271.562623] env[61868]: DEBUG nova.scheduler.client.report [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2271.578357] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.269s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2271.578902] env[61868]: ERROR nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2271.578902] env[61868]: Faults: ['InvalidArgument'] [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Traceback (most recent call last): [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: 
ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self.driver.spawn(context, instance, image_meta, [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self._fetch_image_if_missing(context, vi) [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] image_cache(vi, tmp_image_ds_loc) [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] vm_util.copy_virtual_disk( [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] session._wait_for_task(vmdk_copy_task) [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] return self.wait_for_task(task_ref) [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] return evt.wait() [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] result = hub.switch() [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] return self.greenlet.switch() [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] self.f(*self.args, **self.kw) [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] raise exceptions.translate_fault(task_info.error) [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Faults: ['InvalidArgument'] [ 2271.578902] env[61868]: ERROR nova.compute.manager [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] [ 2271.580382] env[61868]: DEBUG nova.compute.utils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2271.581090] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Build of instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee was re-scheduled: A specified parameter was not correct: fileType [ 2271.581090] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2271.581517] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2271.581694] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2271.581868] env[61868]: DEBUG nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2271.582033] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2271.856278] env[61868]: DEBUG nova.network.neutron [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2271.870470] env[61868]: INFO nova.compute.manager [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Took 0.29 seconds to deallocate network for instance. [ 2271.970020] env[61868]: INFO nova.scheduler.client.report [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted allocations for instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee [ 2271.991370] env[61868]: DEBUG oslo_concurrency.lockutils [None req-f2f5ea90-8b0b-41d5-ba66-58f170f98acf tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 595.402s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2271.992120] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 400.194s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2271.992479] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2271.993186] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
2271.993499] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2271.997165] env[61868]: INFO nova.compute.manager [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Terminating instance [ 2271.999238] env[61868]: DEBUG nova.compute.manager [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2271.999445] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2271.999716] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f729f30f-cc5a-4ad4-9c60-690a52a27394 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.009743] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af9d02d-54f4-4a50-aaac-04b2f3b079f5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.036315] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad095fd9-abd0-4c75-8d7c-10dcebc2caee could not be found. [ 2272.036543] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2272.036729] env[61868]: INFO nova.compute.manager [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2272.036982] env[61868]: DEBUG oslo.service.loopingcall [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2272.037628] env[61868]: DEBUG nova.compute.manager [-] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2272.037733] env[61868]: DEBUG nova.network.neutron [-] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2272.064483] env[61868]: DEBUG nova.network.neutron [-] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2272.073550] env[61868]: INFO nova.compute.manager [-] [instance: ad095fd9-abd0-4c75-8d7c-10dcebc2caee] Took 0.04 seconds to deallocate network for instance. [ 2272.171550] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c70c7c3c-a6fd-4748-b875-9d83252b7d0e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "ad095fd9-abd0-4c75-8d7c-10dcebc2caee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2274.485566] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2278.351631] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.352037] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.352037] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2282.352037] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2282.368654] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2282.368809] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2282.368938] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2282.369066] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2282.369237] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2282.369370] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2282.369492] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2286.364732] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2291.351520] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2291.351897] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2291.351897] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2291.363834] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2291.364076] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2291.364249] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2291.364415] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2291.365548] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f19b58-3ef0-4c07-bc94-412e1bd4fb87 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.374526] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030a5c19-caed-4a66-97c5-158e3f22f7a3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.388320] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f8bf8f-bd46-4eca-a7a5-f53f4f883286 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.394606] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8954039-4a7c-400d-b70f-8e294f039a62 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.422436] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181909MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2291.422579] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2291.422768] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2291.475479] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance fea9d55d-d045-4d4e-b647-044e4729f21c actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.475637] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.475762] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.475884] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8484d90b-13a3-41af-a88a-856a8770a4ce actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.476011] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.476161] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.476342] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2291.476483] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2291.550506] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75dc714-a40a-4f24-b35b-432cd1ab1e0b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.558122] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5e4e05-11fa-4830-a327-8184f80e21d6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.587143] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8390c0ae-2e9f-4c40-9135-4f3821dffb19 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.595096] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b1c1f9-22d3-4fac-8f16-7877163b137d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.609353] env[61868]: DEBUG nova.compute.provider_tree 
[None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2291.617878] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2291.635560] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2291.635752] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.213s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2292.636125] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.351583] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.351822] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.351975] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2321.236235] env[61868]: WARNING oslo_vmware.rw_handles [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2321.236235] env[61868]: ERROR oslo_vmware.rw_handles [ 2321.237333] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2321.238681] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2321.238955] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Copying Virtual Disk [datastore2] vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/1b012d7a-3441-4404-ab11-74bd0b975927/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2321.239303] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5d40b13-e20d-4f3e-9aeb-dc8755c6929c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.248525] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: 
(returnval){ [ 2321.248525] env[61868]: value = "task-41214" [ 2321.248525] env[61868]: _type = "Task" [ 2321.248525] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.258582] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2321.758454] env[61868]: DEBUG oslo_vmware.exceptions [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2321.758740] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2321.759296] env[61868]: ERROR nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2321.759296] env[61868]: Faults: ['InvalidArgument'] [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Traceback (most recent call last): [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] yield resources [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self.driver.spawn(context, instance, image_meta, [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self._fetch_image_if_missing(context, vi) [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: 
fea9d55d-d045-4d4e-b647-044e4729f21c] image_cache(vi, tmp_image_ds_loc) [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] vm_util.copy_virtual_disk( [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] session._wait_for_task(vmdk_copy_task) [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] return self.wait_for_task(task_ref) [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] return evt.wait() [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] result = hub.switch() [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] return self.greenlet.switch() [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self.f(*self.args, **self.kw) [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] raise exceptions.translate_fault(task_info.error) [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Faults: ['InvalidArgument'] [ 2321.759296] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] [ 2321.761109] env[61868]: INFO nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Terminating instance [ 2321.761177] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee 
tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2321.761405] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2321.761659] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-613e56c2-cfa8-4238-9041-5f897caa5df8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.764302] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2321.765006] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2321.765237] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a66097-c02b-4e41-8f22-575d9d62e1fc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.773126] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2321.773540] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f83ff13-5457-4652-ad15-3b7c4824bcb7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.776242] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2321.776421] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2321.777926] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f80a8f7-41eb-4694-a25e-c67bb9145f7e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.784403] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Waiting for the task: (returnval){ [ 2321.784403] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5205c254-f531-5aad-3b08-1d36712d054f" [ 2321.784403] env[61868]: _type = "Task" [ 2321.784403] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.792434] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5205c254-f531-5aad-3b08-1d36712d054f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2321.850086] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2321.850323] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2321.850474] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleting the datastore file [datastore2] fea9d55d-d045-4d4e-b647-044e4729f21c {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2321.850754] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24b0cca4-c922-4f02-85ff-59e1c5a97c0b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.857603] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2321.857603] env[61868]: value = "task-41216" [ 2321.857603] env[61868]: _type = "Task" [ 2321.857603] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2321.866475] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41216, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2322.295175] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2322.295740] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Creating directory with path [datastore2] vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2322.295740] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1db0b2be-5c4c-43a2-96f1-d5c76efa89b3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.307772] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Created directory with path [datastore2] vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2322.308041] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Fetch image to [datastore2] vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2322.308213] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2322.308880] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132fe8b2-9e73-436c-859f-3eba5c24f36b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.316420] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a585d1-850e-417f-8a7c-e42763522e92 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.328718] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11914e6-9e35-4de0-8a7f-d9e8fcf7ead4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.365404] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0d6c658c-cc99-426f-8c36-e2c2221e343a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.376402] env[61868]: DEBUG oslo_vmware.api [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081117} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2322.376618] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8c093dee-f375-47cd-a7f3-7a4595367dc7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.378442] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2322.378634] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2322.378803] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2322.378974] env[61868]: INFO nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2322.381197] env[61868]: DEBUG nova.compute.claims [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2322.381391] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2322.381683] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2322.404631] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2322.461529] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2322.518593] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2322.518793] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2322.564300] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4857acd-c3a3-44ce-b6a9-6bfdb0cf22c8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.572412] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c8660b-f443-4f97-b2e0-7c8c4c540d49 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.601786] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5722011-f063-45bc-8134-1f42adcaa4c6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.609727] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6ed3d7-3c0e-4444-a392-985250c5eab3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.623153] env[61868]: DEBUG nova.compute.provider_tree [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2322.633098] env[61868]: DEBUG nova.scheduler.client.report [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2322.651232] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.269s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2322.651791] env[61868]: ERROR nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2322.651791] env[61868]: Faults: ['InvalidArgument'] [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Traceback (most recent call last): [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2322.651791] env[61868]: ERROR 
nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self.driver.spawn(context, instance, image_meta, [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self._fetch_image_if_missing(context, vi) [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] image_cache(vi, tmp_image_ds_loc) [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] vm_util.copy_virtual_disk( [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] session._wait_for_task(vmdk_copy_task) [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] return self.wait_for_task(task_ref) [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] return evt.wait() [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] result = hub.switch() [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] return self.greenlet.switch() [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] self.f(*self.args, **self.kw) [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] raise exceptions.translate_fault(task_info.error) [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Faults: ['InvalidArgument'] [ 2322.651791] env[61868]: ERROR nova.compute.manager [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] [ 2322.652757] env[61868]: DEBUG nova.compute.utils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2322.653996] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Build of instance fea9d55d-d045-4d4e-b647-044e4729f21c was re-scheduled: A specified parameter was not correct: fileType [ 2322.653996] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2322.654375] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2322.654553] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2322.654723] env[61868]: DEBUG nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2322.654894] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2322.930894] env[61868]: DEBUG nova.network.neutron [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2322.944061] env[61868]: INFO nova.compute.manager [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Took 0.29 seconds to deallocate network for instance. [ 2323.049167] env[61868]: INFO nova.scheduler.client.report [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted allocations for instance fea9d55d-d045-4d4e-b647-044e4729f21c [ 2323.070456] env[61868]: DEBUG oslo_concurrency.lockutils [None req-e3edebb4-312c-4d5a-9099-8eaa39a25a55 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 636.511s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2323.070901] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 441.489s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2323.071183] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "fea9d55d-d045-4d4e-b647-044e4729f21c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2323.071531] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2323.071762] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2323.074082] env[61868]: INFO nova.compute.manager [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Terminating instance [ 2323.076036] env[61868]: DEBUG nova.compute.manager [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2323.076336] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2323.076949] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13337bc6-d8ec-4751-9190-76f184bfdd58 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.087297] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55701fa4-f1e6-45da-b25e-8e02d3abcb06 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.118337] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fea9d55d-d045-4d4e-b647-044e4729f21c could not be found. [ 2323.118753] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2323.119063] env[61868]: INFO nova.compute.manager [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2323.119461] env[61868]: DEBUG oslo.service.loopingcall [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2323.119831] env[61868]: DEBUG nova.compute.manager [-] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2323.120051] env[61868]: DEBUG nova.network.neutron [-] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2323.154345] env[61868]: DEBUG nova.network.neutron [-] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2323.164045] env[61868]: INFO nova.compute.manager [-] [instance: fea9d55d-d045-4d4e-b647-044e4729f21c] Took 0.04 seconds to deallocate network for instance. [ 2323.263887] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9a92d1ec-c798-457d-8bc7-17f1aa7c5b0b tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "fea9d55d-d045-4d4e-b647-044e4729f21c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2338.353135] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.358934] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.353612] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.353972] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2344.353972] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2344.368998] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2344.369191] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2344.369420] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2344.369572] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2344.369838] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2344.370012] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2346.364573] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.352036] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.352371] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61868) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2349.643565] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.644026] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Getting list of instances from cluster (obj){ [ 2349.644026] env[61868]: value = "domain-c8" [ 2349.644026] env[61868]: _type = "ClusterComputeResource" [ 2349.644026] env[61868]: } {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2349.645524] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f634402-969e-4953-903c-80c67cc9eb3e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.658683] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Got total of 5 instances {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2350.377013] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.351905] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.351756] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.352161] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.362163] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2352.362400] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2352.362573] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2352.362731] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2352.363872] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e8efad-c815-4bed-8469-2233148521f6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.373216] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7a184a-d4fa-4125-b177-e1b9b515164a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.387911] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b96a7b8-7013-464a-bb4e-14282ba329d4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.395342] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a276c455-95dd-41f9-a5d0-636520e683b9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.425635] env[61868]: DEBUG nova.compute.resource_tracker [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181940MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2352.425829] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2352.425978] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2352.499422] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.499603] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.499732] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8484d90b-13a3-41af-a88a-856a8770a4ce actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.499854] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.499971] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.500210] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2352.500352] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2352.515560] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing inventories for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2352.528522] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Updating ProviderTree inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2352.528715] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2352.539685] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing aggregate associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, aggregates: None {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2352.556100] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing trait associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2352.631935] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567c9712-0cd5-4433-8c2a-7757d54e7e30 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.640448] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b9775a2e-46d2-4373-966a-b2360c0288e8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.676505] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25df9a42-3810-400b-8153-3289ecccdf5b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.684639] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3390e0-5357-4409-b6b7-7d973ae0aabc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.698297] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2352.707631] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2352.723708] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2352.723945] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.298s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2353.724137] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2354.352053] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2354.352294] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2354.361554] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] There are 0 instances to clean {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2355.361112] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes 
{{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.361412] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.361564] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2368.612867] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2368.628309] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Getting list of instances from cluster (obj){ [ 2368.628309] env[61868]: value = "domain-c8" [ 2368.628309] env[61868]: _type = "ClusterComputeResource" [ 2368.628309] env[61868]: } {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2368.629567] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668a654e-9969-4a83-be50-715a4836ec18 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.643318] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Got total of 5 instances {{(pid=61868) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2368.643484] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 47821dd7-73ae-40eb-b7f2-7b656737cd1f {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 2368.643676] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 4a4e7cbe-dc5d-4643-b115-0142b5c978de {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 2368.643838] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 8484d90b-13a3-41af-a88a-856a8770a4ce {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 2368.643988] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid 803c1598-d083-405d-80a1-6adf3fbd2f96 {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 2368.644159] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Triggering sync for uuid e97ea5c4-163c-4870-9744-3f20cf57f53f {{(pid=61868) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10342}} [ 2368.644477] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2368.644718] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2368.644917] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "8484d90b-13a3-41af-a88a-856a8770a4ce" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2368.645129] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "803c1598-d083-405d-80a1-6adf3fbd2f96" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2368.645363] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2371.593314] env[61868]: WARNING oslo_vmware.rw_handles [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2371.593314] env[61868]: ERROR oslo_vmware.rw_handles [ 2371.594264] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2371.595765] env[61868]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2371.596015] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Copying Virtual Disk [datastore2] vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/0c0bc45d-44f3-4c14-8304-45678123e439/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2371.596301] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-405b9191-ee61-42ce-ac0b-494cece4272e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.604505] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Waiting for the task: (returnval){ [ 2371.604505] env[61868]: value = "task-41217" [ 2371.604505] env[61868]: _type = "Task" [ 2371.604505] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.612708] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Task: {'id': task-41217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.115046] env[61868]: DEBUG oslo_vmware.exceptions [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2372.115272] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2372.115838] env[61868]: ERROR nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2372.115838] env[61868]: Faults: ['InvalidArgument'] [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Traceback (most recent call last): [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] yield resources [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self.driver.spawn(context, instance, image_meta, [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self._fetch_image_if_missing(context, vi) [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] image_cache(vi, tmp_image_ds_loc) [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] vm_util.copy_virtual_disk( [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] session._wait_for_task(vmdk_copy_task) [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] return self.wait_for_task(task_ref) [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] return evt.wait() [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] result = hub.switch() [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] return self.greenlet.switch() [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self.f(*self.args, **self.kw) [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] raise exceptions.translate_fault(task_info.error) [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Faults: ['InvalidArgument'] [ 2372.115838] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] [ 2372.117489] env[61868]: INFO nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Terminating instance [ 2372.117781] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2372.117988] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2372.118233] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-6ecbd1be-c82e-45d8-9aa4-2edde7f72c78 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.120692] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2372.120886] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2372.121655] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213c58f3-7821-4f61-8918-c627dcb225be {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.128326] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2372.128546] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43e04077-1ef5-4446-831e-5d7b05e60061 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.130742] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2372.130911] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2372.131862] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-366a5e0a-cd85-449c-8c9c-4177f8646f13 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.136964] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 2372.136964] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]524ff7d1-83cd-9a83-a2b3-c68d940ee314" [ 2372.136964] env[61868]: _type = "Task" [ 2372.136964] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.144159] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]524ff7d1-83cd-9a83-a2b3-c68d940ee314, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.200587] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2372.200908] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2372.201007] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Deleting the datastore file [datastore2] 47821dd7-73ae-40eb-b7f2-7b656737cd1f {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2372.201285] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6585e66c-3720-4ea5-82a9-beccd5317424 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.209206] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Waiting for the task: (returnval){ [ 2372.209206] env[61868]: value = "task-41219" [ 2372.209206] env[61868]: _type = "Task" [ 2372.209206] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2372.219003] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Task: {'id': task-41219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.647199] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2372.647554] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating directory with path [datastore2] vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2372.647691] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9907308d-2551-413f-9682-6674ade5b4eb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.660733] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Created directory with path [datastore2] vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2372.660932] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Fetch image to [datastore2] vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2372.661099] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2372.661937] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea36b6da-ad26-42bf-b28c-a3d9016a2a08 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.669382] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588cc61c-909a-4470-8143-3ea0460224ea {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.679129] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f80c5d-13bb-49bf-9ced-fa4f74e866e4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.713606] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9763ed94-dab8-42d8-8cfe-50a4b495ad1c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.722848] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aece241a-b9bd-49f2-9712-4832164b845b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.724690] env[61868]: DEBUG oslo_vmware.api [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Task: {'id': task-41219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07693} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2372.724945] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2372.725129] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2372.725303] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2372.725480] env[61868]: INFO nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Took 0.60 seconds to destroy the instance on the hypervisor. 
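The "Invoking FileManager.DeleteDatastoreFile_Task" / "Waiting for the task" / "completed successfully" sequence above (task-41219) is oslo.vmware's standard invoke-and-poll pattern, and the same pattern is what raised the InvalidArgument fault from CopyVirtualDisk_Task earlier. A minimal sketch of that pattern follows; the vCenter host and credentials are placeholders, not values from this log, and this is an illustration of the library usage, not Nova's actual code:

# Minimal sketch of the oslo.vmware invoke-and-poll pattern behind the
# "Invoking ...", "Waiting for the task", and "completed successfully"
# log lines above. Host and credentials are placeholders; a real call
# needs a reachable vCenter.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',       # placeholder vCenter host
    'user', 'secret',        # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)  # interval behind the "progress is 0%" polls

content = session.vim.service_content
dc_moref = None  # placeholder; a real delete needs the Datacenter moref

# Start the asynchronous server-side task; the return value is a Task
# managed-object reference like the "task-41219" seen in the log.
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          content.fileManager,
                          name='[datastore2] 47821dd7-73ae-40eb-b7f2-7b656737cd1f',
                          datacenter=dc_moref)

# Poll until the task finishes. On a task error this raises a
# VimFaultException, which is how the "A specified parameter was not
# correct: fileType" fault surfaced from CopyVirtualDisk_Task above.
session.wait_for_task(task)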
[ 2372.727619] env[61868]: DEBUG nova.compute.claims [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2372.727785] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2372.727997] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2372.746405] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2372.818259] env[61868]: DEBUG oslo_vmware.rw_handles [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2372.875147] env[61868]: DEBUG oslo_vmware.rw_handles [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2372.875326] env[61868]: DEBUG oslo_vmware.rw_handles [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2372.896691] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2078ef-d6fb-4e21-9f85-832019d44b77 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.904981] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6fa24a-066e-4853-8dfb-5ea5bee98883 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.934212] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e411c69f-982f-488f-87f6-7103c15cdd53 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.941963] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430cd656-2e3b-4c59-a4a3-ed98f46c7a2d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.956671] env[61868]: DEBUG nova.compute.provider_tree [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2372.964751] env[61868]: DEBUG nova.scheduler.client.report [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2372.980668] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2372.981217] env[61868]: ERROR nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2372.981217] env[61868]: Faults: ['InvalidArgument'] [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Traceback (most recent call last): [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2372.981217] env[61868]: 
ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self.driver.spawn(context, instance, image_meta, [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self._fetch_image_if_missing(context, vi) [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] image_cache(vi, tmp_image_ds_loc) [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] vm_util.copy_virtual_disk( [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] session._wait_for_task(vmdk_copy_task) [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] return self.wait_for_task(task_ref) [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] return evt.wait() [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] result = hub.switch() [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] return self.greenlet.switch() [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] self.f(*self.args, **self.kw) [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] raise exceptions.translate_fault(task_info.error) [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Faults: ['InvalidArgument'] [ 2372.981217] env[61868]: ERROR nova.compute.manager [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] [ 2372.982168] env[61868]: DEBUG nova.compute.utils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2372.983544] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Build of instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f was re-scheduled: A specified parameter was not correct: fileType [ 2372.983544] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2372.983969] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2372.984168] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2372.984340] env[61868]: DEBUG nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2372.984509] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2373.453759] env[61868]: DEBUG nova.network.neutron [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.467428] env[61868]: INFO nova.compute.manager [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Took 0.48 seconds to deallocate network for instance. [ 2373.557202] env[61868]: INFO nova.scheduler.client.report [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Deleted allocations for instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f [ 2373.576109] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c11b35ca-5602-4786-a171-b11a20b401ee tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 531.366s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2373.576372] env[61868]: DEBUG oslo_concurrency.lockutils [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 335.013s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2373.576591] env[61868]: DEBUG oslo_concurrency.lockutils [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Acquiring lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2373.576797] env[61868]: DEBUG oslo_concurrency.lockutils [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2373.576960] env[61868]: DEBUG oslo_concurrency.lockutils [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2373.578884] env[61868]: INFO nova.compute.manager [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Terminating instance [ 2373.580649] env[61868]: DEBUG nova.compute.manager [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2373.580840] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2373.581351] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86a66939-472a-4f9c-be13-67396f194601 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.591217] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac4c29f-a5c3-41f0-9fce-937392134a58 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.616537] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f could not be found. [ 2373.616751] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2373.616928] env[61868]: INFO nova.compute.manager [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2373.617168] env[61868]: DEBUG oslo.service.loopingcall [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2373.617404] env[61868]: DEBUG nova.compute.manager [-] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2373.617497] env[61868]: DEBUG nova.network.neutron [-] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2373.646038] env[61868]: DEBUG nova.network.neutron [-] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2373.654798] env[61868]: INFO nova.compute.manager [-] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] Took 0.04 seconds to deallocate network for instance. [ 2373.750712] env[61868]: DEBUG oslo_concurrency.lockutils [None req-82bb9b1b-c5b1-416a-811f-659f352d6677 tempest-ServerAddressesTestJSON-825595071 tempest-ServerAddressesTestJSON-825595071-project-member] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.174s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2373.751610] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 5.107s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2373.751799] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 47821dd7-73ae-40eb-b7f2-7b656737cd1f] During sync_power_state the instance has a pending task (deleting). Skip.
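The stretch above is the complete failure path for instance 47821dd7-73ae-40eb-b7f2-7b656737cd1f: vm_util.copy_virtual_disk submits a CopyVirtualDisk_Task, oslo.vmware polls it from a looping call, and when the task lands in the error state the VIM fault ("A specified parameter was not correct: fileType", faults ['InvalidArgument']) is translated into a VimFaultException that unwinds through _fetch_image_if_missing and spawn() into _build_and_run_instance. The manager then aborts the resource claim, marks the build re-scheduled, deallocates networking, and the later terminate_instance call finds nothing on the backend (hence the InstanceNotFound warning), because the VM was never created. A minimal sketch of the poll-and-translate loop, assuming a hypothetical get_task_info() in place of the PropertyCollector round-trips oslo.vmware actually issues:

import time

class VimFaultException(Exception):
    # Mirrors the shape of oslo_vmware.exceptions.VimFaultException:
    # a message plus a list of fault names such as ['InvalidArgument'].
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(get_task_info, interval=0.5):
    # get_task_info is a hypothetical callable returning an object with
    # .state ('running' | 'success' | 'error') and .error; the real loop in
    # oslo_vmware.api is driven by a FixedIntervalLoopingCall on eventlet.
    while True:
        info = get_task_info()
        if info.state == 'running':
            time.sleep(interval)  # the real code yields the greenthread instead
            continue
        if info.state == 'success':
            return info
        # Error state: surface the VIM fault as a Python exception, which is
        # what produced the traceback above.
        raise VimFaultException(info.error.fault_list, info.error.message)

Because _do_build_and_run_instance treats this as a retryable build failure, the instance is rescheduled and its placement allocations are deleted before the next attempt, which matches the ordering of the scheduler report and lockutils entries above.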
[ 2373.751970] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "47821dd7-73ae-40eb-b7f2-7b656737cd1f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2379.481240] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2379.481616] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2379.493666] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2379.543628] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2379.543877] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2379.545455] env[61868]: INFO nova.compute.claims [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2379.670642] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0afcbd4-bce9-4b18-bf3e-8b11f8ac70ae {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.678746] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52939eb-97fd-48c8-9f15-48d1dd37fb06 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.709872] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2634e5-86f0-4d19-bcd6-457b02def5df {{(pid=61868) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.718213] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc0df02-97be-4563-8fd7-2122998cde3f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.734187] env[61868]: DEBUG nova.compute.provider_tree [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2379.743584] env[61868]: DEBUG nova.scheduler.client.report [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2379.760689] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.217s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2379.761330] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2379.796457] env[61868]: DEBUG nova.compute.utils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2379.798017] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Not allocating networking since 'none' was specified. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2379.809694] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2379.881920] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2379.903434] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2379.903705] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2379.903865] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2379.904060] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2379.904210] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2379.904360] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2379.904582] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2379.904765] env[61868]: DEBUG nova.virt.hardware [None 
req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2379.904942] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2379.905104] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2379.905275] env[61868]: DEBUG nova.virt.hardware [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2379.906170] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d278a7-714e-4683-9951-c0470aa079bf {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.914391] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c19448-7c17-4c17-a89f-b0a1faae147b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.927962] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance VIF info [] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2379.933609] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Creating folder: Project (31c5db8a353640b48ee42ffad88cd5c7). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2379.933894] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a53d51bc-e74a-4580-8b97-1a59c06a86b5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.944608] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Created folder: Project (31c5db8a353640b48ee42ffad88cd5c7) in parent group-v18181. [ 2379.944798] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Creating folder: Instances. Parent ref: group-v18310. 
{{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2379.945025] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99bb9d93-d4e1-4a23-8fda-9c809689fcb3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.954487] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Created folder: Instances in parent group-v18310. [ 2379.954715] env[61868]: DEBUG oslo.service.loopingcall [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2379.954897] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2379.955097] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d0b6584-15a3-4449-b8b6-38cc7b908795 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.970811] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2379.970811] env[61868]: value = "task-41222" [ 2379.970811] env[61868]: _type = "Task" [ 2379.970811] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.977889] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41222, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.480359] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41222, 'name': CreateVM_Task, 'duration_secs': 0.237128} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2380.480534] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2380.480862] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2380.481084] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2380.483903] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3466f0e8-be4c-484b-b2d8-e2915f98e8a9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.513301] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Reconfiguring VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2380.513972] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca25cbd1-5d8f-4826-90cf-d6c169ced166 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.529197] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for the task: (returnval){ [ 2380.529197] env[61868]: value = "task-41223" [ 2380.529197] env[61868]: _type = "Task" [ 2380.529197] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2380.537166] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Task: {'id': task-41223, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.039440] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Task: {'id': task-41223, 'name': ReconfigVM_Task, 'duration_secs': 0.097641} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2381.039729] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Reconfigured VM instance to enable vnc on port - 5902 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2381.039947] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.559s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2381.040224] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2381.040372] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2381.040734] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2381.040996] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-403bd190-93c2-4b5a-9282-c1c787320b71 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.046020] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for the task: (returnval){ [ 2381.046020] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52086ced-de8a-b842-bbf3-5b6621303003" [ 2381.046020] env[61868]: _type = "Task" [ 2381.046020] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.053667] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52086ced-de8a-b842-bbf3-5b6621303003, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.558213] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2381.558598] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2381.558677] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2401.385570] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2402.584701] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2402.585016] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2402.597081] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Starting instance...
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2402.648960] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2402.649216] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2402.650758] env[61868]: INFO nova.compute.claims [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2402.781844] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a945fe14-aa30-43ab-b39a-bc1aac2d0d91 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.789850] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857eaa6d-78d1-4a70-83d8-a9d418e85dcc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.821363] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a70b12d-f63b-441f-af16-3fdfbd076440 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.830567] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1400d66-86af-4a97-84c1-6463c6181afd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.844155] env[61868]: DEBUG nova.compute.provider_tree [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2402.852637] env[61868]: DEBUG nova.scheduler.client.report [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2402.869377] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 
tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.220s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2402.869864] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2402.904902] env[61868]: DEBUG nova.compute.utils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2402.907042] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2402.907360] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2402.918209] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2402.953594] env[61868]: DEBUG nova.policy [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf74f15b527146bb9bc726e54d220a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01d5fac165e449d49cd6e9d9c7e9d116', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 2402.993613] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2403.014491] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2403.014746] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2403.014977] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2403.015078] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2403.015225] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2403.015374] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2403.015578] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2403.015731] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2403.015898] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Got 1 
possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2403.016068] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2403.016258] env[61868]: DEBUG nova.virt.hardware [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2403.017226] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b69593d-6bcc-46e3-8449-0ebb01f6479d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.025942] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f1ea45-bee3-4678-8b2c-439b5acfd0e6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.219425] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Successfully created port: 79737faa-7eb8-4fc8-8d20-1904df20bf1d {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2403.690287] env[61868]: DEBUG nova.compute.manager [req-eeee7ef4-7ca5-41a9-8f26-75e1aff8f1cb req-8904e980-35eb-4f22-8f67-cf09688eaf5d service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Received event network-vif-plugged-79737faa-7eb8-4fc8-8d20-1904df20bf1d {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2403.690568] env[61868]: DEBUG oslo_concurrency.lockutils [req-eeee7ef4-7ca5-41a9-8f26-75e1aff8f1cb req-8904e980-35eb-4f22-8f67-cf09688eaf5d service nova] Acquiring lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2403.690775] env[61868]: DEBUG oslo_concurrency.lockutils [req-eeee7ef4-7ca5-41a9-8f26-75e1aff8f1cb req-8904e980-35eb-4f22-8f67-cf09688eaf5d service nova] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2403.690944] env[61868]: DEBUG oslo_concurrency.lockutils [req-eeee7ef4-7ca5-41a9-8f26-75e1aff8f1cb req-8904e980-35eb-4f22-8f67-cf09688eaf5d service nova] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2403.691114] env[61868]: DEBUG nova.compute.manager [req-eeee7ef4-7ca5-41a9-8f26-75e1aff8f1cb req-8904e980-35eb-4f22-8f67-cf09688eaf5d service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] No waiting events found dispatching
network-vif-plugged-79737faa-7eb8-4fc8-8d20-1904df20bf1d {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2403.691328] env[61868]: WARNING nova.compute.manager [req-eeee7ef4-7ca5-41a9-8f26-75e1aff8f1cb req-8904e980-35eb-4f22-8f67-cf09688eaf5d service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Received unexpected event network-vif-plugged-79737faa-7eb8-4fc8-8d20-1904df20bf1d for instance with vm_state building and task_state spawning. [ 2403.761377] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Successfully updated port: 79737faa-7eb8-4fc8-8d20-1904df20bf1d {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2403.773305] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "refresh_cache-8b79cee2-64a5-45fa-b99d-3aea86d4dc91" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2403.773446] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "refresh_cache-8b79cee2-64a5-45fa-b99d-3aea86d4dc91" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2403.773603] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2403.810258] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2403.972420] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Updating instance_info_cache with network_info: [{"id": "79737faa-7eb8-4fc8-8d20-1904df20bf1d", "address": "fa:16:3e:44:32:40", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79737faa-7e", "ovs_interfaceid": "79737faa-7eb8-4fc8-8d20-1904df20bf1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2404.014614] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "refresh_cache-8b79cee2-64a5-45fa-b99d-3aea86d4dc91" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2404.014905] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Instance network_info: |[{"id": "79737faa-7eb8-4fc8-8d20-1904df20bf1d", "address": "fa:16:3e:44:32:40", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79737faa-7e", "ovs_interfaceid": "79737faa-7eb8-4fc8-8d20-1904df20bf1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2404.015369] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a 
tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:32:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79737faa-7eb8-4fc8-8d20-1904df20bf1d', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2404.023720] env[61868]: DEBUG oslo.service.loopingcall [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2404.024360] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2404.024610] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cc4b8b6-1654-408a-bf08-1e5bd00bdca8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.047121] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2404.047121] env[61868]: value = "task-41224" [ 2404.047121] env[61868]: _type = "Task" [ 2404.047121] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.056127] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41224, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.557454] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41224, 'name': CreateVM_Task, 'duration_secs': 0.306161} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.557629] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2404.558223] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2404.558448] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2404.561315] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee679b6-2e3d-4e6a-bafa-977926d687f8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.595075] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Reconfiguring VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2404.595404] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26f64858-2802-4e87-a5ad-31dd412be5bd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.610946] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2404.610946] env[61868]: value = "task-41225" [ 2404.610946] env[61868]: _type = "Task" [ 2404.610946] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.619029] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41225, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.121257] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41225, 'name': ReconfigVM_Task, 'duration_secs': 0.104032} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.121608] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Reconfigured VM instance to enable vnc on port - 5903 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2405.121814] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.563s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2405.122063] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2405.122209] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2405.122553] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2405.122819] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cd82751-75dd-4f78-9579-23bfdd8ac486 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.128641] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2405.128641] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52856c05-60cf-7d28-d72f-0d3e2f88bbc5" [ 2405.128641] env[61868]: _type = "Task" [ 2405.128641] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.138055] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52856c05-60cf-7d28-d72f-0d3e2f88bbc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.639459] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2405.640137] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2405.640394] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2405.723549] env[61868]: DEBUG nova.compute.manager [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Received event network-changed-79737faa-7eb8-4fc8-8d20-1904df20bf1d {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2405.723727] env[61868]: DEBUG nova.compute.manager [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Refreshing instance network info cache due to event network-changed-79737faa-7eb8-4fc8-8d20-1904df20bf1d. {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2405.723943] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] Acquiring lock "refresh_cache-8b79cee2-64a5-45fa-b99d-3aea86d4dc91" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2405.724105] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] Acquired lock "refresh_cache-8b79cee2-64a5-45fa-b99d-3aea86d4dc91" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2405.724272] env[61868]: DEBUG nova.network.neutron [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Refreshing network info cache for port 79737faa-7eb8-4fc8-8d20-1904df20bf1d {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2405.949085] env[61868]: DEBUG nova.network.neutron [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Updated VIF entry in instance network info cache for port 79737faa-7eb8-4fc8-8d20-1904df20bf1d. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2405.949453] env[61868]: DEBUG nova.network.neutron [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Updating instance_info_cache with network_info: [{"id": "79737faa-7eb8-4fc8-8d20-1904df20bf1d", "address": "fa:16:3e:44:32:40", "network": {"id": "843ef760-8abf-4cfe-bcb8-9ec2e65dca2f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1286639074-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "01d5fac165e449d49cd6e9d9c7e9d116", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79737faa-7e", "ovs_interfaceid": "79737faa-7eb8-4fc8-8d20-1904df20bf1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2405.958687] env[61868]: DEBUG oslo_concurrency.lockutils [req-a2a46012-7999-45a8-bd8a-e21c45f4b853 req-c71d20ac-3b94-41ea-af5d-85a51bbc8761 service nova] Releasing lock "refresh_cache-8b79cee2-64a5-45fa-b99d-3aea86d4dc91" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2406.351214] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.351599] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2406.351693] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2406.367277] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2406.367427] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2406.367558] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2406.367747] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2406.367814] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2406.367921] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2406.368047] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2408.363791] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.351560] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2412.351677] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2413.352026] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2414.351846] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2414.363994] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2414.364366] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2414.364414] env[61868]: DEBUG oslo_concurrency.lockutils [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2414.364565] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2414.365670] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c96268e-6eb8-417d-9b8f-5f260059f43e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.374573] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635ec2a0-dbd3-40e9-a9eb-4cf0a17f130f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.389659] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0c9bae-4aff-47ad-94f8-269a3611a960 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.395962] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb30577-3c24-4d6f-92cd-3d5e6eaaafda {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.424292] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181934MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2414.424453] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2414.424661] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2414.477567] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2414.477731] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8484d90b-13a3-41af-a88a-856a8770a4ce actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2414.477858] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2414.477980] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2414.478099] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2414.478215] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2414.478394] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2414.478531] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2414.560027] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901ddf15-4b1e-4bc5-b18a-f66f68c1962a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.567797] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1934e6e-0e5a-47ec-b660-0e350a9063b6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.598199] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093eda1e-2942-4935-a394-42f5988c0535 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.606817] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7257591a-3779-4dc2-af34-9aa500f7398a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.620417] env[61868]: DEBUG nova.compute.provider_tree 
[None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2414.629371] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2414.646472] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2414.646675] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.222s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2415.646385] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.351338] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.352035] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2419.944521] env[61868]: WARNING oslo_vmware.rw_handles [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2419.944521] env[61868]: ERROR oslo_vmware.rw_handles [ 2419.945193] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2419.946877] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2419.947124] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Copying Virtual Disk [datastore2] vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/509750d3-66d3-4ad8-bb17-20c015f33898/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2419.947411] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-252f4b49-b420-48f3-9957-54b73a00ee16 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.956093] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 
tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 2419.956093] env[61868]: value = "task-41226" [ 2419.956093] env[61868]: _type = "Task" [ 2419.956093] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2419.964073] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41226, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2420.467646] env[61868]: DEBUG oslo_vmware.exceptions [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2420.467953] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2420.468541] env[61868]: ERROR nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2420.468541] env[61868]: Faults: ['InvalidArgument'] [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Traceback (most recent call last): [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] yield resources [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self.driver.spawn(context, instance, image_meta, [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self._fetch_image_if_missing(context, vi) [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] image_cache(vi, tmp_image_ds_loc) [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] vm_util.copy_virtual_disk( [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] session._wait_for_task(vmdk_copy_task) [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] return self.wait_for_task(task_ref) [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] return evt.wait() [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] result = hub.switch() [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] return self.greenlet.switch() [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self.f(*self.args, **self.kw) [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] raise exceptions.translate_fault(task_info.error) [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Faults: ['InvalidArgument'] [ 2420.468541] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] [ 2420.470377] env[61868]: INFO nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Terminating instance [ 2420.470524] env[61868]: 
DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2420.470735] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2420.470979] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-385d6faf-5197-46b4-9594-14a72a05c87a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.473350] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2420.473540] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2420.474259] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99560e3-a5d8-496a-bc19-0873fd6fa7d9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.481916] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2420.482904] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec690897-0039-4b73-95ab-4dd3bd18d650 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.484245] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2420.484415] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2420.485061] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84496779-dc46-48c0-8dec-205154900b0b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.489897] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2420.489897] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]521f3b2c-3a68-2cbc-08bd-f5ac6bf41fae" [ 2420.489897] env[61868]: _type = "Task" [ 2420.489897] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2420.497717] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]521f3b2c-3a68-2cbc-08bd-f5ac6bf41fae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2420.550637] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2420.550962] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2420.551059] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleting the datastore file [datastore2] 4a4e7cbe-dc5d-4643-b115-0142b5c978de {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2420.551304] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05bbc616-69cc-41d9-98a6-af1becfd483a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.558293] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for the task: (returnval){ [ 2420.558293] env[61868]: value = "task-41228" [ 2420.558293] env[61868]: _type = "Task" [ 2420.558293] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2420.566406] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41228, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2421.001159] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2421.001507] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2421.001687] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1940a31c-4257-4e71-970e-0b95ca3093ec {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.013317] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2421.013592] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Fetch image to [datastore2] vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2421.013827] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2421.014603] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614f2465-73ec-4b79-9064-53e83f203967 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.021773] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5bf3d7-860e-48c6-a163-98d05cd19ad0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.031322] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952d4175-fb09-4a0d-9e5e-80faf31859c0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.064910] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6b239b-7556-4dc1-96d0-00a3352ac72a {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.071918] env[61868]: DEBUG oslo_vmware.api [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Task: {'id': task-41228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075603} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2421.073297] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2421.073481] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2421.073657] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2421.073845] env[61868]: INFO nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2421.075558] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0293ef0f-0aef-420b-98ce-f6cdd0f1a658 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.077429] env[61868]: DEBUG nova.compute.claims [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2421.077602] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2421.077809] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2421.103033] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2421.156184] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2421.214618] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2421.214812] env[61868]: DEBUG oslo_vmware.rw_handles [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2421.247621] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afe389c-061d-49c2-8ec1-dabf8473205e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.256124] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaaff03-f50c-481b-8b60-fbb398cf6bfe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.286608] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8836715d-fa4a-4dca-8a9b-0f0a9e157a2e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.294009] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b31f437-6506-4b24-83da-277afa8d2451 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.309185] env[61868]: DEBUG nova.compute.provider_tree [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2421.317894] env[61868]: DEBUG nova.scheduler.client.report [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2421.333965] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.256s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2421.334525] env[61868]: ERROR nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2421.334525] env[61868]: Faults: ['InvalidArgument'] [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Traceback (most recent call last): [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2421.334525] 
env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self.driver.spawn(context, instance, image_meta, [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self._fetch_image_if_missing(context, vi) [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] image_cache(vi, tmp_image_ds_loc) [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] vm_util.copy_virtual_disk( [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] session._wait_for_task(vmdk_copy_task) [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] return self.wait_for_task(task_ref) [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] return evt.wait() [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] result = hub.switch() [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] return self.greenlet.switch() [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] self.f(*self.args, **self.kw) [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] raise exceptions.translate_fault(task_info.error) [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Faults: ['InvalidArgument'] [ 2421.334525] env[61868]: ERROR nova.compute.manager [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] [ 2421.335385] env[61868]: DEBUG nova.compute.utils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2421.336601] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Build of instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de was re-scheduled: A specified parameter was not correct: fileType [ 2421.336601] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2421.337076] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2421.337134] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2421.337361] env[61868]: DEBUG nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2421.337554] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2421.635507] env[61868]: DEBUG nova.network.neutron [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2421.648508] env[61868]: INFO nova.compute.manager [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Took 0.31 seconds to deallocate network for instance. [ 2421.747565] env[61868]: INFO nova.scheduler.client.report [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Deleted allocations for instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de [ 2421.767677] env[61868]: DEBUG oslo_concurrency.lockutils [None req-7d875463-400a-4752-8f42-613d12e23133 tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 437.458s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2421.767903] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 241.442s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2421.768133] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Acquiring lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2421.768346] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2421.768517] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2421.770425] env[61868]: INFO nova.compute.manager [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Terminating instance [ 2421.772066] env[61868]: DEBUG nova.compute.manager [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2421.772280] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2421.772777] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa58fc84-c820-4447-bac3-332a637e6d11 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.781875] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf43a08-66dc-4eb5-859b-baf479f0709c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.808172] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a4e7cbe-dc5d-4643-b115-0142b5c978de could not be found. [ 2421.808414] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2421.808622] env[61868]: INFO nova.compute.manager [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2421.808856] env[61868]: DEBUG oslo.service.loopingcall [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2421.809366] env[61868]: DEBUG nova.compute.manager [-] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2421.809476] env[61868]: DEBUG nova.network.neutron [-] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2421.837280] env[61868]: DEBUG nova.network.neutron [-] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2421.846045] env[61868]: INFO nova.compute.manager [-] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] Took 0.04 seconds to deallocate network for instance. [ 2421.942881] env[61868]: DEBUG oslo_concurrency.lockutils [None req-6bb6a96b-3725-4744-b18d-f871dbb0ba0a tempest-ServerDiskConfigTestJSON-1489146392 tempest-ServerDiskConfigTestJSON-1489146392-project-member] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2421.944435] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 53.300s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2421.944554] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 4a4e7cbe-dc5d-4643-b115-0142b5c978de] During sync_power_state the instance has a pending task (deleting). Skip. 
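The _sync_power_states record just above bails out because the instance still has a pending task. A simplified sketch of that guard under assumed field names (not Nova's exact implementation):

    def query_driver_power_state_and_sync(instance):
        # Skip instances that are mid-operation; reconciling their power
        # state now would race with the in-flight task (here: deleting).
        task_state = instance.get('task_state')
        if task_state is not None:
            print('During sync_power_state the instance has a pending task '
                  f'({task_state}). Skip.')
            return
        # Otherwise the real code compares the stored power state with what
        # the hypervisor reports and reconciles any drift.

    query_driver_power_state_and_sync({'task_state': 'deleting'})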
[ 2421.944749] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "4a4e7cbe-dc5d-4643-b115-0142b5c978de" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2462.351461] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2467.352354] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2467.352911] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2467.352911] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2467.367485] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2467.367664] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2467.367773] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2467.367898] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2467.368026] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2467.368147] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2468.363028] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2470.371007] env[61868]: WARNING oslo_vmware.rw_handles [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2470.371007] env[61868]: ERROR oslo_vmware.rw_handles [ 2470.371635] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2470.373556] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2470.373812] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Copying Virtual Disk [datastore2] vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/0b1abdc5-65ce-4a34-80de-99aa237f06dd/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2470.374103] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edbf42d1-5c66-4c25-98a3-29f866420ee2 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.382715] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2470.382715] env[61868]: value = "task-41229" [ 2470.382715] env[61868]: _type = "Task" [ 2470.382715] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2470.391000] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41229, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2470.893466] env[61868]: DEBUG oslo_vmware.exceptions [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2470.893770] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2470.894385] env[61868]: ERROR nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2470.894385] env[61868]: Faults: ['InvalidArgument'] [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Traceback (most recent call last): [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] yield resources [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] self.driver.spawn(context, instance, image_meta, [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] self._fetch_image_if_missing(context, vi) [ 2470.894385] env[61868]: ERROR nova.compute.manager 
[instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] image_cache(vi, tmp_image_ds_loc) [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] vm_util.copy_virtual_disk( [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] session._wait_for_task(vmdk_copy_task) [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] return self.wait_for_task(task_ref) [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] return evt.wait() [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] result = hub.switch() [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] return self.greenlet.switch() [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] self.f(*self.args, **self.kw) [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] raise exceptions.translate_fault(task_info.error) [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Faults: ['InvalidArgument'] [ 2470.894385] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] [ 2470.895334] env[61868]: INFO nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] 
[instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Terminating instance [ 2470.897053] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2470.897053] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2470.897053] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2ea321c-3f27-4e3c-b893-f26aad882b50 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.899265] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2470.899509] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2470.900246] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e49196-ed61-490e-a548-7e02b0c2ebc4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.907975] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2470.909172] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-663532e3-dc1b-4e18-9b30-706f066dedad {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.910820] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2470.910998] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2470.911791] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75571397-b37f-461a-8c16-23de78a1d651 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.917952] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2470.917952] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]529b7689-ff1d-060d-a22f-abf99e076ac2" [ 2470.917952] env[61868]: _type = "Task" [ 2470.917952] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2470.929530] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]529b7689-ff1d-060d-a22f-abf99e076ac2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2470.985447] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2470.985723] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2470.985861] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleting the datastore file [datastore2] 8484d90b-13a3-41af-a88a-856a8770a4ce {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2470.986148] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9040b84c-5acc-496c-b807-e72d4866cb38 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2470.992753] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2470.992753] env[61868]: value = "task-41231" [ 2470.992753] env[61868]: _type = "Task" [ 2470.992753] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2471.001497] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41231, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2471.428615] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2471.428887] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating directory with path [datastore2] vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2471.429126] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccc140f0-fc99-4444-95a5-ac3378065793 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.442140] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Created directory with path [datastore2] vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2471.442373] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Fetch image to [datastore2] vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2471.442559] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2471.443355] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d116dc-248e-4ffc-86c2-c98ff17ba834 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.450670] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33023b05-861c-4aaf-88bb-b873c9ce5129 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.460530] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ab4910-dd63-4f5e-a125-9013a0afb1bc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.492993] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de093bf-986d-40ba-a237-a1648e081420 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.505043] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5aa46c52-c37d-43d2-86a4-1ef6c106197d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.506917] env[61868]: DEBUG oslo_vmware.api [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067529} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2471.507381] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2471.507381] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2471.507501] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2471.507693] env[61868]: INFO nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Took 0.61 seconds to destroy the instance on the hypervisor. 
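The 'Waiting for the task: (returnval){ ... }' / "progress is 0%" / "completed successfully" sequence above is oslo.vmware's wait_for_task polling a vCenter task until it finishes or faults; the earlier fileType failures surface through the same loop once the task reports an error. A simplified, self-contained sketch of the pattern; the names below are illustrative, not the library's API:

    import time

    class TaskFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a task until it succeeds, or raise its translated fault."""
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # e.g. "A specified parameter was not correct: fileType"
                raise TaskFault(info['error'])
            print(f"Task {info['id']} progress is {info.get('progress', 0)}%.")
            time.sleep(interval)

    # Toy task that completes on the second poll:
    states = iter([{'id': 'task-1', 'state': 'running', 'progress': 0},
                   {'id': 'task-1', 'state': 'success'}])
    wait_for_task(lambda: next(states), interval=0.01)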
[ 2471.509980] env[61868]: DEBUG nova.compute.claims [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2471.509980] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2471.510422] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2471.529615] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2471.580034] env[61868]: DEBUG oslo_vmware.rw_handles [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2471.636328] env[61868]: DEBUG oslo_vmware.rw_handles [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2471.636527] env[61868]: DEBUG oslo_vmware.rw_handles [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2471.683812] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8a7da6-a550-43a8-8800-20846f6cc40f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.691695] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a749f4-bd82-4cc9-9097-d344d196c0ac {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.724029] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0735e8c6-6ab7-49d3-8a74-0b7a564d847b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.733137] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38025ef1-7ba3-4035-b7ab-1585442de45d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2471.748934] env[61868]: DEBUG nova.compute.provider_tree [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2471.758706] env[61868]: DEBUG nova.scheduler.client.report [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2471.779139] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.269s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2471.779726] env[61868]: ERROR nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2471.779726] env[61868]: Faults: ['InvalidArgument'] [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Traceback (most recent call last): [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 
8484d90b-13a3-41af-a88a-856a8770a4ce] self.driver.spawn(context, instance, image_meta, [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] self._fetch_image_if_missing(context, vi) [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] image_cache(vi, tmp_image_ds_loc) [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] vm_util.copy_virtual_disk( [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] session._wait_for_task(vmdk_copy_task) [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] return self.wait_for_task(task_ref) [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] return evt.wait() [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] result = hub.switch() [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] return self.greenlet.switch() [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] self.f(*self.args, **self.kw) [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] raise exceptions.translate_fault(task_info.error) [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Faults: ['InvalidArgument'] [ 2471.779726] env[61868]: ERROR nova.compute.manager [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] [ 2471.780732] env[61868]: DEBUG nova.compute.utils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2471.782339] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Build of instance 8484d90b-13a3-41af-a88a-856a8770a4ce was re-scheduled: A specified parameter was not correct: fileType [ 2471.782339] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2471.782761] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2471.782942] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2471.783115] env[61868]: DEBUG nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2471.783281] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2471.957729] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "b547b52d-bbb7-4211-b784-5fa278f852eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2471.957956] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2471.969969] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Starting instance... 
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2472.030974] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2472.031239] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2472.032862] env[61868]: INFO nova.compute.claims [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2472.058632] env[61868]: DEBUG nova.network.neutron [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2472.075815] env[61868]: INFO nova.compute.manager [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Took 0.29 seconds to deallocate network for instance. 
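The fetch flow visible in the log stages the image as tmp-sparse.vmdk under a per-request vmware_temp directory, then issues CopyVirtualDisk_Task to a .vmdk beside it, which is the step repeatedly failing with the fileType fault. A small sketch of that datastore path layout; the helper is illustrative, not Nova's API:

    import posixpath
    import uuid

    def staging_paths(datastore, image_id):
        """Mirror the datastore paths seen in the log for one fetch attempt."""
        tmp_dir = posixpath.join('vmware_temp', str(uuid.uuid4()), image_id)
        src = f'[{datastore}] {posixpath.join(tmp_dir, "tmp-sparse.vmdk")}'
        dst = f'[{datastore}] {posixpath.join(tmp_dir, image_id + ".vmdk")}'
        return src, dst

    src, dst = staging_paths('datastore2',
                             '790b1826-10c3-4b26-ad5d-ce8b36354025')
    print('download to :', src)   # HTTP write handle target
    print('copy disk to:', dst)   # CopyVirtualDisk_Task destination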
[ 2472.193730] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce73b245-8e04-421c-afc0-76d236944cd8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.197245] env[61868]: INFO nova.scheduler.client.report [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted allocations for instance 8484d90b-13a3-41af-a88a-856a8770a4ce [ 2472.214771] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a571906b-5e6e-47f0-b57d-7341cbb127bc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.219384] env[61868]: DEBUG oslo_concurrency.lockutils [None req-d09e6d4f-cbd3-4252-902d-e7d55d94847f tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 462.225s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2472.219663] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 266.674s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2472.219883] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "8484d90b-13a3-41af-a88a-856a8770a4ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2472.220178] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2472.220338] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2472.248815] env[61868]: INFO nova.compute.manager [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Terminating instance [ 2472.250916] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97882342-70c7-4e58-8ec9-ae1c5f9216b3 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.254396] env[61868]: DEBUG nova.compute.manager [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2472.254605] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2472.254873] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2e028eb-4f47-4cdd-9ef2-b2e028044c80 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.263465] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f56301-9633-4377-899f-a9690916705d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.270479] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677125ff-0302-4cdd-86eb-fd98ff641dd6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.291795] env[61868]: DEBUG nova.compute.provider_tree [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2472.299094] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8484d90b-13a3-41af-a88a-856a8770a4ce could not be found. [ 2472.299353] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2472.299541] env[61868]: INFO nova.compute.manager [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2472.299954] env[61868]: DEBUG oslo.service.loopingcall [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2472.301703] env[61868]: DEBUG nova.scheduler.client.report [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2472.304998] env[61868]: DEBUG nova.compute.manager [-] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2472.305117] env[61868]: DEBUG nova.network.neutron [-] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2472.325093] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2472.325093] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2472.336151] env[61868]: DEBUG nova.network.neutron [-] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2472.353271] env[61868]: INFO nova.compute.manager [-] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] Took 0.05 seconds to deallocate network for instance. [ 2472.368793] env[61868]: DEBUG nova.compute.utils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2472.370383] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Allocating IP information in the background. 
{{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2472.370550] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2472.384225] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2472.418123] env[61868]: DEBUG nova.policy [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '424b508614194ac2ad15e8cb62f2d041', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f518980782c4dc5ac6efe31af19af16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 2472.454459] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2472.459964] env[61868]: DEBUG oslo_concurrency.lockutils [None req-2d1312ea-439e-4438-b08e-8445e6bba006 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.240s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2472.460869] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 103.816s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2472.461057] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8484d90b-13a3-41af-a88a-856a8770a4ce] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2472.461231] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "8484d90b-13a3-41af-a88a-856a8770a4ce" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2472.477545] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2472.477777] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2472.477931] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2472.478113] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2472.478256] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2472.478401] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2472.478609] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2472.478766] env[61868]: DEBUG nova.virt.hardware [None 
req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2472.478931] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2472.479091] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2472.479295] env[61868]: DEBUG nova.virt.hardware [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2472.480383] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a98952-df0c-4ae0-b6d7-88e191692f98 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.494604] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1629c51-2c12-4ec6-9bbd-4e85a08cf10d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2472.689397] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Successfully created port: 5527610e-ccdb-4f39-a124-4c2649616129 {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2473.203431] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Successfully updated port: 5527610e-ccdb-4f39-a124-4c2649616129 {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2473.215438] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "refresh_cache-b547b52d-bbb7-4211-b784-5fa278f852eb" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2473.215588] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "refresh_cache-b547b52d-bbb7-4211-b784-5fa278f852eb" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2473.215774] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2473.260891] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2473.347177] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2473.366831] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2473.367023] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2473.439796] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Updating instance_info_cache with network_info: [{"id": "5527610e-ccdb-4f39-a124-4c2649616129", "address": "fa:16:3e:c9:59:a2", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5527610e-cc", "ovs_interfaceid": "5527610e-ccdb-4f39-a124-4c2649616129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2473.454584] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "refresh_cache-b547b52d-bbb7-4211-b784-5fa278f852eb" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2473.455032] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Instance network_info: |[{"id": "5527610e-ccdb-4f39-a124-4c2649616129", "address": "fa:16:3e:c9:59:a2", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5527610e-cc", "ovs_interfaceid": "5527610e-ccdb-4f39-a124-4c2649616129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2473.455697] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:59:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5527610e-ccdb-4f39-a124-4c2649616129', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2473.463455] env[61868]: DEBUG oslo.service.loopingcall [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2473.463966] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2473.464222] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d89265a4-6162-4aed-a777-56676422b910 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2473.486493] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2473.486493] env[61868]: value = "task-41232" [ 2473.486493] env[61868]: _type = "Task" [ 2473.486493] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2473.495578] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41232, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2473.996796] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41232, 'name': CreateVM_Task, 'duration_secs': 0.295853} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2473.996980] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2473.997528] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2473.997784] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2474.000635] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254b93e8-38b4-43cf-a140-2b472fea55e3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.029599] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2474.029893] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95cd85fa-f00c-4b08-bf34-214b9561229b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.046080] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2474.046080] env[61868]: value = "task-41233" [ 2474.046080] env[61868]: _type = "Task" [ 2474.046080] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2474.054548] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41233, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2474.125516] env[61868]: DEBUG nova.compute.manager [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Received event network-vif-plugged-5527610e-ccdb-4f39-a124-4c2649616129 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2474.125845] env[61868]: DEBUG oslo_concurrency.lockutils [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] Acquiring lock "b547b52d-bbb7-4211-b784-5fa278f852eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2474.125986] env[61868]: DEBUG oslo_concurrency.lockutils [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2474.126258] env[61868]: DEBUG oslo_concurrency.lockutils [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2474.126461] env[61868]: DEBUG nova.compute.manager [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] No waiting events found dispatching network-vif-plugged-5527610e-ccdb-4f39-a124-4c2649616129 {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2474.126660] env[61868]: WARNING nova.compute.manager [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Received unexpected event network-vif-plugged-5527610e-ccdb-4f39-a124-4c2649616129 for instance with vm_state building and task_state spawning. [ 2474.126862] env[61868]: DEBUG nova.compute.manager [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Received event network-changed-5527610e-ccdb-4f39-a124-4c2649616129 {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2474.127112] env[61868]: DEBUG nova.compute.manager [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Refreshing instance network info cache due to event network-changed-5527610e-ccdb-4f39-a124-4c2649616129. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2474.127322] env[61868]: DEBUG oslo_concurrency.lockutils [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] Acquiring lock "refresh_cache-b547b52d-bbb7-4211-b784-5fa278f852eb" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2474.127463] env[61868]: DEBUG oslo_concurrency.lockutils [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] Acquired lock "refresh_cache-b547b52d-bbb7-4211-b784-5fa278f852eb" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2474.127623] env[61868]: DEBUG nova.network.neutron [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Refreshing network info cache for port 5527610e-ccdb-4f39-a124-4c2649616129 {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2474.351096] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2474.361620] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2474.361839] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2474.362000] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2474.362146] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2474.363300] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20565818-a8f7-4550-85a8-4ed6f0093a41 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.366633] env[61868]: DEBUG nova.network.neutron [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Updated VIF entry in instance network info cache for port 5527610e-ccdb-4f39-a124-4c2649616129. 
{{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2474.366975] env[61868]: DEBUG nova.network.neutron [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Updating instance_info_cache with network_info: [{"id": "5527610e-ccdb-4f39-a124-4c2649616129", "address": "fa:16:3e:c9:59:a2", "network": {"id": "6da51a65-4f1d-44b5-8bb8-b049cebe1cc1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2119750877-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6f518980782c4dc5ac6efe31af19af16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5527610e-cc", "ovs_interfaceid": "5527610e-ccdb-4f39-a124-4c2649616129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2474.374167] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1117fbd-bcc8-4545-811e-fa6db7c4d67e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.378852] env[61868]: DEBUG oslo_concurrency.lockutils [req-5fb1787a-5147-4f98-8941-37325dbac07e req-44992f24-967b-4f92-94c3-7adcc0c0f414 service nova] Releasing lock "refresh_cache-b547b52d-bbb7-4211-b784-5fa278f852eb" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2474.390096] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ab8293-a3b6-4e4a-9a6b-5235d3eb8d8d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.396741] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b414aae5-7ee4-4b68-b7b1-a95adb2ec91a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.426225] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181889MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2474.426378] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2474.426579] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2474.478007] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2474.478270] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2474.478318] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2474.478399] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2474.478517] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b547b52d-bbb7-4211-b784-5fa278f852eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2474.478694] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2474.478829] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2474.555529] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41233, 'name': ReconfigVM_Task, 'duration_secs': 0.101947} completed successfully. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2474.556874] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2474.557229] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.559s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2474.557675] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2474.557919] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2474.558382] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2474.559497] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc086993-fdfb-4796-b59f-bd6d7ff08dc0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.562154] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-704a0860-3924-48e2-9b14-de8f30f9be22 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.568285] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2474.568285] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52e8b933-0a6e-04ad-00eb-779abf8116c1" [ 2474.568285] env[61868]: _type = "Task" [ 2474.568285] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2474.569344] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37de501-1530-49e3-83c3-be5f20ad438f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.582242] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52e8b933-0a6e-04ad-00eb-779abf8116c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2474.607177] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbd1de7-1379-41e2-afb0-152f2f824394 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.614939] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3fe315-5903-4cf0-bee8-588b432d3540 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.628203] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2474.636920] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2474.654484] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2474.654691] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.228s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2475.079617] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2475.079882] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2475.080138] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2475.655698] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2476.351240] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2476.351469] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2477.352705] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2521.301977] env[61868]: WARNING oslo_vmware.rw_handles [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2521.301977] env[61868]: ERROR oslo_vmware.rw_handles [ 2521.302847] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 
803c1598-d083-405d-80a1-6adf3fbd2f96] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2521.304853] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2521.304916] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Copying Virtual Disk [datastore2] vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/e9c2debe-367d-4827-bedf-8c8a454d0d26/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2521.305191] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d348d1f9-8432-42ad-bdf7-be51d4df8a19 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2521.314083] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2521.314083] env[61868]: value = "task-41234" [ 2521.314083] env[61868]: _type = "Task" [ 2521.314083] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2521.322574] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41234, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2521.826204] env[61868]: DEBUG oslo_vmware.exceptions [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2521.826488] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2521.827034] env[61868]: ERROR nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2521.827034] env[61868]: Faults: ['InvalidArgument'] [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Traceback (most recent call last): [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] yield resources [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] self.driver.spawn(context, instance, image_meta, [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] self._fetch_image_if_missing(context, vi) [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] image_cache(vi, tmp_image_ds_loc) [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] vm_util.copy_virtual_disk( [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] session._wait_for_task(vmdk_copy_task) [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] return self.wait_for_task(task_ref) [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] return evt.wait() [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] result = hub.switch() [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] return self.greenlet.switch() [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] self.f(*self.args, **self.kw) [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] raise exceptions.translate_fault(task_info.error) [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Faults: ['InvalidArgument'] [ 2521.827034] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] [ 2521.828020] env[61868]: INFO nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Terminating instance [ 2521.828924] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2521.829127] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2521.829387] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aebb45bc-c0e7-46c5-b02a-1090070b3075 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2521.831688] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2521.831882] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2521.832724] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915a5120-4f99-43db-8419-526054c7b9e4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2521.839906] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2521.840157] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00335c71-0cbd-475e-b31f-967b230498a4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2521.842598] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2521.842788] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2521.843721] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eda182c3-5a08-4aad-af6d-ef51ee07a339 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2521.849505] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2521.849505] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52dac4eb-9fa7-e165-3ee7-7e422194c3ac" [ 2521.849505] env[61868]: _type = "Task" [ 2521.849505] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2521.856311] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52dac4eb-9fa7-e165-3ee7-7e422194c3ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2521.914625] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2521.914937] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2521.915180] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleting the datastore file [datastore2] 803c1598-d083-405d-80a1-6adf3fbd2f96 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2521.915497] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-313cdaf6-c358-4a96-ac33-55224faa15dc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2521.921844] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for the task: (returnval){ [ 2521.921844] env[61868]: value = "task-41236" [ 2521.921844] env[61868]: _type = "Task" [ 2521.921844] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2521.931162] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41236, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2522.359844] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2522.360221] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2522.360335] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8a9f1b6-8d19-4d5a-9706-90e5cb500714 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.371996] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2522.372222] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Fetch image to [datastore2] vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2522.372409] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2522.373152] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b220b1-ac70-4062-9f25-686550334167 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.380230] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a911f11-67ac-4c88-adfb-6a79af3f8755 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.389171] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8334ee7e-9938-45ee-8404-e6072935c7e5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.419043] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de14de-d766-4405-845c-ddd9ccc14d2f 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.428433] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-90939b03-5b1d-4dee-8d87-43a3cf0ab625 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.432775] env[61868]: DEBUG oslo_vmware.api [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Task: {'id': task-41236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078826} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2522.433308] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2522.433516] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2522.433703] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2522.433907] env[61868]: INFO nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2522.435917] env[61868]: DEBUG nova.compute.claims [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2522.436103] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2522.436321] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2522.453853] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2522.504125] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2522.564675] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2522.565125] env[61868]: DEBUG oslo_vmware.rw_handles [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2522.597492] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eedd6fe-30c6-418f-8421-ce8ed984841f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.605049] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0930e73c-c59a-4fb2-8cc1-6a7ae65744e9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.635126] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d775036-07d4-4533-bcca-a6c084bd6353 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.642697] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea38fa9-087c-42ba-8822-198f263da31b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2522.657913] env[61868]: DEBUG nova.compute.provider_tree [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2522.666719] env[61868]: DEBUG nova.scheduler.client.report [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2522.682685] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.246s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2522.683219] env[61868]: ERROR nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2522.683219] env[61868]: Faults: ['InvalidArgument'] [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Traceback (most recent call last): [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 
803c1598-d083-405d-80a1-6adf3fbd2f96] self.driver.spawn(context, instance, image_meta, [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] self._fetch_image_if_missing(context, vi) [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] image_cache(vi, tmp_image_ds_loc) [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] vm_util.copy_virtual_disk( [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] session._wait_for_task(vmdk_copy_task) [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] return self.wait_for_task(task_ref) [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] return evt.wait() [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] result = hub.switch() [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] return self.greenlet.switch() [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] self.f(*self.args, **self.kw) [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] raise exceptions.translate_fault(task_info.error) [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Faults: ['InvalidArgument'] [ 2522.683219] env[61868]: ERROR nova.compute.manager [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] [ 2522.684056] env[61868]: DEBUG nova.compute.utils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2522.685332] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Build of instance 803c1598-d083-405d-80a1-6adf3fbd2f96 was re-scheduled: A specified parameter was not correct: fileType [ 2522.685332] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2522.685777] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2522.686022] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2522.686220] env[61868]: DEBUG nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2522.686391] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2522.953202] env[61868]: DEBUG nova.network.neutron [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2522.968137] env[61868]: INFO nova.compute.manager [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Took 0.28 seconds to deallocate network for instance. [ 2523.064331] env[61868]: INFO nova.scheduler.client.report [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Deleted allocations for instance 803c1598-d083-405d-80a1-6adf3fbd2f96 [ 2523.083073] env[61868]: DEBUG oslo_concurrency.lockutils [None req-54744886-2bc9-4d76-b404-41b434ab369e tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 454.700s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2523.083319] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 259.575s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2523.083534] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Acquiring lock "803c1598-d083-405d-80a1-6adf3fbd2f96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2523.083739] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
2523.083905] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2523.086560] env[61868]: INFO nova.compute.manager [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Terminating instance [ 2523.088967] env[61868]: DEBUG nova.compute.manager [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2523.089207] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2523.089772] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd18d4f0-c202-4f99-bad4-6bb2f2f39012 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2523.100710] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb87f2eb-9f15-4547-ae64-69a310fc4317 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2523.125100] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 803c1598-d083-405d-80a1-6adf3fbd2f96 could not be found. [ 2523.125310] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2523.125491] env[61868]: INFO nova.compute.manager [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2523.125736] env[61868]: DEBUG oslo.service.loopingcall [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2523.125972] env[61868]: DEBUG nova.compute.manager [-] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2523.126077] env[61868]: DEBUG nova.network.neutron [-] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2523.149662] env[61868]: DEBUG nova.network.neutron [-] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2523.158737] env[61868]: INFO nova.compute.manager [-] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] Took 0.03 seconds to deallocate network for instance. [ 2523.249897] env[61868]: DEBUG oslo_concurrency.lockutils [None req-9396c59e-502a-4c15-a673-00f98b2e6eff tempest-ServersTestJSON-1722207346 tempest-ServersTestJSON-1722207346-project-member] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2523.250948] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 154.606s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2523.251234] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 803c1598-d083-405d-80a1-6adf3fbd2f96] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2523.251561] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "803c1598-d083-405d-80a1-6adf3fbd2f96" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2523.350561] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2528.351928] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2528.352264] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2528.352264] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2528.365651] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2528.365801] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2528.365934] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2528.366064] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2528.366187] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2530.361556] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2533.350917] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2534.352031] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2535.352089] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2535.362822] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2535.363043] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2535.363213] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2535.363365] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2535.364923] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b0d52c-a779-420e-a5c6-78212808c994 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.373541] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2570a35a-29c2-4507-acc7-5ee054a681c1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.387406] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a19d85a-9a0b-47c4-8d9f-a91367ebcecb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.394847] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb21b09e-c6a5-4630-b159-0574a09aef55 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.425235] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181924MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2535.425484] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2535.425568] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2535.472986] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance e97ea5c4-163c-4870-9744-3f20cf57f53f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.473150] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.473273] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.473390] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b547b52d-bbb7-4211-b784-5fa278f852eb actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.473568] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2535.473707] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2535.533154] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618b7328-bf5e-46f1-b2b0-14f9c4eb4327 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.540994] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72225f0-d1d3-4d27-86b4-1f579e605574 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.571568] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d852686f-d977-4cdf-ba01-b88604057c84 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.579220] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bb5193-9567-463d-810e-85da17a47bb5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.592346] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2535.601039] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2535.617755] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2535.617939] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.192s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2536.617491] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2536.617952] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2537.352637] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2538.352417] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2571.065354] env[61868]: WARNING oslo_vmware.rw_handles [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2571.065354] env[61868]: ERROR oslo_vmware.rw_handles [ 2571.066042] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2571.068070] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2571.068368] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Copying Virtual Disk [datastore2] vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/09eee50f-f65d-49a5-98c7-9e13b31e92cb/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2571.068661] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-975e241d-690b-4638-baa5-f22d63eacc3d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.076865] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2571.076865] env[61868]: value = "task-41237" [ 2571.076865] env[61868]: _type = "Task" [ 2571.076865] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2571.085305] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2571.587433] env[61868]: DEBUG oslo_vmware.exceptions [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2571.587619] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2571.588102] env[61868]: ERROR nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2571.588102] env[61868]: Faults: ['InvalidArgument'] [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Traceback (most recent call last): [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] yield resources [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self.driver.spawn(context, instance, image_meta, [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self._fetch_image_if_missing(context, vi) [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] image_cache(vi, tmp_image_ds_loc) [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] vm_util.copy_virtual_disk( [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] session._wait_for_task(vmdk_copy_task) [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] return self.wait_for_task(task_ref) [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] return evt.wait() [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] result = hub.switch() [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] return self.greenlet.switch() [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self.f(*self.args, **self.kw) [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] raise exceptions.translate_fault(task_info.error) [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Faults: ['InvalidArgument'] [ 2571.588102] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] [ 2571.589046] env[61868]: INFO nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Terminating instance [ 2571.590022] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2571.590227] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2571.590472] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcd784f2-c1f1-4351-9a00-d4d392c4b78c {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.593133] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2571.593336] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2571.594080] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc84f938-bee4-45c9-a539-bfbc67c4c458 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.601229] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2571.601490] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce0b89fd-1167-461a-a035-e8be30f7cac6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.603712] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2571.603890] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2571.604976] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d3f5b89-83bf-4dc5-bb90-f0c49dacd7e9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.610140] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for the task: (returnval){ [ 2571.610140] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52b94d2a-7013-aa5c-7763-c8d85941e791" [ 2571.610140] env[61868]: _type = "Task" [ 2571.610140] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2571.619157] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52b94d2a-7013-aa5c-7763-c8d85941e791, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2571.673371] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2571.673653] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2571.673849] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleting the datastore file [datastore2] e97ea5c4-163c-4870-9744-3f20cf57f53f {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2571.674143] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83d76c73-1a33-4980-a9ce-b17713116dcd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2571.683044] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2571.683044] env[61868]: value = "task-41239" [ 2571.683044] env[61868]: _type = "Task" [ 2571.683044] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2571.691227] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41239, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2572.121432] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2572.121808] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Creating directory with path [datastore2] vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2572.121946] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-745e4f33-7eab-47b9-959e-c50a39408f6a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.133449] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Created directory with path [datastore2] vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2572.133696] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Fetch image to [datastore2] vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2572.133823] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2572.134569] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503f8afa-3b79-484d-afbc-388c429946a7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.141724] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b1ce11-5a71-41b8-973a-6b8ec779b177 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.151268] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385cddc8-f13c-4603-90f8-c80d40c83a8a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.182379] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4a307c-a5c5-4f1e-bf60-d5238255b22f {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.193738] env[61868]: DEBUG oslo_vmware.api [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087746} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2572.194237] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2572.194425] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2572.194598] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2572.194770] env[61868]: INFO nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Took 0.60 seconds to destroy the instance on the hypervisor. 
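The traceback above is the recurring failure in this log: VirtualDiskManager.CopyVirtualDisk_Task rejects the fileType parameter, oslo.vmware's task poller translates the VIM fault into a VimFaultException carrying Faults: ['InvalidArgument'], and nova's compute manager treats that as a reschedulable spawn failure before tearing the instance down. A minimal sketch of how such a fault surfaces to a caller, assuming oslo.vmware is installed; wait_for_copy_task and cache_sparse_image are hypothetical stand-ins for the real code in nova/virt/vmwareapi/vm_util.py and vmops.py:

    from oslo_vmware import exceptions as vexc

    def wait_for_copy_task(session, task_ref):
        # Hypothetical stand-in for session._wait_for_task(); the real poller
        # (oslo_vmware.api._poll_task) raises the translated fault when the
        # task's info.state comes back as "error", as in the traceback above.
        raise vexc.VimFaultException(
            ['InvalidArgument'],
            'A specified parameter was not correct: fileType')

    def cache_sparse_image(session, task_ref):
        try:
            wait_for_copy_task(session, task_ref)
        except vexc.VimFaultException as e:
            # fault_list carries the raw VIM fault names; the compute manager
            # logs them ("Faults: ['InvalidArgument']") and re-schedules.
            print('copy failed: %s -> %s' % (e.fault_list, e))
            raise

    try:
        cache_sparse_image(session=None, task_ref=None)
    except vexc.VimFaultException:
        pass  # in nova this bubbles up to _build_and_run_instance
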
[ 2572.196268] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4021567e-f8e7-48a2-993d-f63fae91037d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.198167] env[61868]: DEBUG nova.compute.claims [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2572.198331] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2572.198551] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2572.222845] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2572.277558] env[61868]: DEBUG oslo_vmware.rw_handles [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2572.334750] env[61868]: DEBUG oslo_vmware.rw_handles [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2572.334947] env[61868]: DEBUG oslo_vmware.rw_handles [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2572.345622] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd184dd-cc76-4f5c-b909-3d9d453ae867 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.353988] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a20e8f2-9ac7-47c4-82f9-1ea5efd5c97c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.383343] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db0899a-9616-4fa6-98f4-1d446eb68e75 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.390942] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1f380d-b04c-4639-9f6e-1bf1e1cd30a0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.407232] env[61868]: DEBUG nova.compute.provider_tree [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2572.415717] env[61868]: DEBUG nova.scheduler.client.report [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2572.431494] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.233s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2572.432060] env[61868]: ERROR nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2572.432060] env[61868]: Faults: ['InvalidArgument'] [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Traceback (most recent call last): [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2572.432060] env[61868]: ERROR 
nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self.driver.spawn(context, instance, image_meta, [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self._fetch_image_if_missing(context, vi) [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] image_cache(vi, tmp_image_ds_loc) [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] vm_util.copy_virtual_disk( [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] session._wait_for_task(vmdk_copy_task) [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] return self.wait_for_task(task_ref) [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] return evt.wait() [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] result = hub.switch() [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] return self.greenlet.switch() [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] self.f(*self.args, **self.kw) [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] raise exceptions.translate_fault(task_info.error) [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Faults: ['InvalidArgument'] [ 2572.432060] env[61868]: ERROR nova.compute.manager [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] [ 2572.433820] env[61868]: DEBUG nova.compute.utils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2572.436187] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Build of instance e97ea5c4-163c-4870-9744-3f20cf57f53f was re-scheduled: A specified parameter was not correct: fileType [ 2572.436187] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2572.436577] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2572.436751] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2572.436921] env[61868]: DEBUG nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2572.437103] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2572.699862] env[61868]: DEBUG nova.network.neutron [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2572.720394] env[61868]: INFO nova.compute.manager [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Took 0.28 seconds to deallocate network for instance. [ 2572.826168] env[61868]: INFO nova.scheduler.client.report [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted allocations for instance e97ea5c4-163c-4870-9744-3f20cf57f53f [ 2572.847977] env[61868]: DEBUG oslo_concurrency.lockutils [None req-c86db6a8-e4a6-45e3-b0b1-e144de7a0459 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 494.280s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2572.848261] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 298.363s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2572.848593] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "e97ea5c4-163c-4870-9744-3f20cf57f53f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2572.848862] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2572.849064] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2572.851881] env[61868]: INFO nova.compute.manager [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Terminating instance [ 2572.853553] env[61868]: DEBUG nova.compute.manager [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2572.853741] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2572.854219] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea3f2c8e-fd16-4a69-8524-058e3cf79424 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.863449] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c414ef48-4ab5-43fb-abdc-867a90a12e4a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2572.888516] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e97ea5c4-163c-4870-9744-3f20cf57f53f could not be found. [ 2572.888736] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2572.888920] env[61868]: INFO nova.compute.manager [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2572.889169] env[61868]: DEBUG oslo.service.loopingcall [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2572.889427] env[61868]: DEBUG nova.compute.manager [-] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2572.889524] env[61868]: DEBUG nova.network.neutron [-] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2572.915810] env[61868]: DEBUG nova.network.neutron [-] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2572.925086] env[61868]: INFO nova.compute.manager [-] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] Took 0.04 seconds to deallocate network for instance. [ 2573.016345] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ce5a1719-d4fa-4a4a-86b8-0034ab256511 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2573.017365] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 204.372s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2573.017635] env[61868]: INFO nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: e97ea5c4-163c-4870-9744-3f20cf57f53f] During sync_power_state the instance has a pending task (deleting). Skip. 
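The Acquiring lock / "acquired ... waited" / "released ... held" triplets that dominate this stretch are all emitted by oslo_concurrency.lockutils at DEBUG level, so lock contention can be read straight off the log (e.g. do_terminate_instance above waited 298.363s on the instance lock while the failed build still held it). A minimal sketch that reproduces those lines, assuming oslo.concurrency is installed and DEBUG logging is enabled; the lock name mirrors the "compute_resources" lock seen above:

    import logging
    import time

    from oslo_concurrency import lockutils

    # lockutils emits the 'acquired ... waited N.NNNs' / 'released ... held
    # N.NNNs' lines at DEBUG, so enable that to mirror the log output above.
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Any other caller synchronized on "compute_resources" blocks here;
        # its eventual 'waited N.NNNs' line measures the contention.
        time.sleep(0.2)

    abort_instance_claim()
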
[ 2573.017867] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "e97ea5c4-163c-4870-9744-3f20cf57f53f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2574.646006] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2584.352494] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2589.352151] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2589.352528] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2589.352528] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2589.365708] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2589.365883] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2589.365993] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2589.366118] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2591.360857] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2594.352055] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2595.351522] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2595.351751] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2595.361982] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2595.362269] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2595.362380] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2595.362569] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2595.364179] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edeb334-0c0d-4526-b6e2-76a6afd11af1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.373256] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02229844-ca25-4314-b1bf-cdfaf3b93bc3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.388786] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5060ca8d-3c6a-445c-af3c-763e6e81b0a2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.395678] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67af2a90-8cb5-4903-8e01-6c9da07566d3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.424132] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181910MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2595.424273] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2595.424464] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2595.469930] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2595.470090] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2595.470221] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b547b52d-bbb7-4211-b784-5fa278f852eb actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2595.470395] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2595.470562] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2595.519424] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb14a8f-1c33-4044-a2b8-45febbd99793 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.528530] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386e25d9-9cb1-475f-80b2-54d9012c153d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.557361] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08021398-dc4e-40d5-baa4-c05c8ee1b8a0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.564471] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e54138-e771-4d62-9bbf-8dd14c55775b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2595.577220] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2595.586055] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2595.603985] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2595.604197] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.180s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2596.599526] env[61868]: DEBUG oslo_service.periodic_task [None 
req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2597.350795] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2597.352030] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2597.352030] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2597.501573] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2598.352108] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2621.083643] env[61868]: WARNING oslo_vmware.rw_handles [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2621.083643] env[61868]: ERROR oslo_vmware.rw_handles [ 2621.084375] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 
43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2621.086120] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2621.086368] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Copying Virtual Disk [datastore2] vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/dd6ea74d-60c1-49c8-b08e-efac38470792/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2621.086642] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8bbdafb2-05a1-47ec-96b8-abadb981fc12 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.095290] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for the task: (returnval){ [ 2621.095290] env[61868]: value = "task-41240" [ 2621.095290] env[61868]: _type = "Task" [ 2621.095290] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2621.103687] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Task: {'id': task-41240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2621.606022] env[61868]: DEBUG oslo_vmware.exceptions [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2621.606270] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2621.606842] env[61868]: ERROR nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2621.606842] env[61868]: Faults: ['InvalidArgument'] [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Traceback (most recent call last): [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] yield resources [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self.driver.spawn(context, instance, image_meta, [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self._fetch_image_if_missing(context, vi) [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] image_cache(vi, tmp_image_ds_loc) [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] vm_util.copy_virtual_disk( [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] session._wait_for_task(vmdk_copy_task) [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] return self.wait_for_task(task_ref) [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] return evt.wait() [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] result = hub.switch() [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] return self.greenlet.switch() [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self.f(*self.args, **self.kw) [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] raise exceptions.translate_fault(task_info.error) [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Faults: ['InvalidArgument'] [ 2621.606842] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] [ 2621.607782] env[61868]: INFO nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Terminating instance [ 2621.609998] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2621.610154] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquired lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2621.610320] env[61868]: DEBUG nova.network.neutron [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Building network info cache for instance 
{{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2621.611811] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2621.612227] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2621.612483] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cb03f4e-53bc-4acb-b51c-6008096f35de {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.621034] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2621.621207] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2621.622203] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-929abb20-fed0-4609-92f4-0574adeca002 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.627273] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2621.627273] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a1ad2e-e5ec-1787-e5c5-9dced71da448" [ 2621.627273] env[61868]: _type = "Task" [ 2621.627273] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2621.637100] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52a1ad2e-e5ec-1787-e5c5-9dced71da448, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2621.642067] env[61868]: DEBUG nova.network.neutron [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2621.666598] env[61868]: DEBUG nova.network.neutron [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2621.675967] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Releasing lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2621.676455] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2621.676658] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2621.677768] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f688f46-e616-4843-b167-fb1025e23ca3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.685705] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2621.685934] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7eb315ea-2886-4a04-8f11-5ad01c3932c9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.717503] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2621.717748] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2621.717898] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Deleting the datastore file [datastore2] 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2621.718175] env[61868]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34a5032e-8c1f-4ba0-989a-157bb63eb9ad {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2621.725233] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for the task: (returnval){ [ 2621.725233] env[61868]: value = "task-41242" [ 2621.725233] env[61868]: _type = "Task" [ 2621.725233] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2621.733621] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Task: {'id': task-41242, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2622.137561] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2622.137993] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating directory with path [datastore2] vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2622.138080] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60c55ca3-c51e-4672-8570-75b20a06825c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.150596] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Created directory with path [datastore2] vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2622.150810] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Fetch image to [datastore2] vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2622.150958] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2622.151797] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3eafabf-243a-4c22-9c76-31d23123677c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.159163] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbb83e8-95ea-4427-9597-a9c721467a5b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.168470] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4029fd1b-0b88-453d-9b24-d7db4c888c77 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.201161] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6da01d-3d73-41a5-9dcd-59758f7c13e3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.208030] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aa5ead4a-c67e-4cae-90de-7a7362a291b6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.231739] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2622.239463] env[61868]: DEBUG oslo_vmware.api [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Task: {'id': task-41242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045238} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2622.239732] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2622.239910] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2622.240160] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2622.240354] env[61868]: INFO nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Took 0.56 seconds to destroy the instance on the hypervisor. 
[ 2622.240620] env[61868]: DEBUG oslo.service.loopingcall [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2622.240840] env[61868]: DEBUG nova.compute.manager [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Skipping network deallocation for instance since networking was not requested. {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2622.243293] env[61868]: DEBUG nova.compute.claims [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2622.243495] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2622.243725] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2622.287094] env[61868]: DEBUG oslo_vmware.rw_handles [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2622.345199] env[61868]: DEBUG oslo_vmware.rw_handles [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2622.345459] env[61868]: DEBUG oslo_vmware.rw_handles [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2.
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2622.379670] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3936e92f-cf11-4b8b-9406-9f1144aa0302 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.387556] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503304bf-50a5-457d-b6ac-b47a28b4fe12 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.417803] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80262bf-e512-4ba2-a3de-121993c9865b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.425308] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5e424d-b0af-4090-9494-a07f569e6c47 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.438231] env[61868]: DEBUG nova.compute.provider_tree [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2622.446936] env[61868]: DEBUG nova.scheduler.client.report [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2622.464549] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.221s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2622.465090] env[61868]: ERROR nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2622.465090] env[61868]: Faults: ['InvalidArgument'] [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Traceback (most recent call last): [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 
43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self.driver.spawn(context, instance, image_meta, [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self._fetch_image_if_missing(context, vi) [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] image_cache(vi, tmp_image_ds_loc) [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] vm_util.copy_virtual_disk( [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] session._wait_for_task(vmdk_copy_task) [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] return self.wait_for_task(task_ref) [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] return evt.wait() [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] result = hub.switch() [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] return self.greenlet.switch() [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] self.f(*self.args, **self.kw) [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] raise exceptions.translate_fault(task_info.error) [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Faults: ['InvalidArgument'] [ 2622.465090] env[61868]: ERROR nova.compute.manager [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] [ 2622.466063] env[61868]: DEBUG nova.compute.utils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2622.467319] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Build of instance 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a was re-scheduled: A specified parameter was not correct: fileType [ 2622.467319] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2622.467686] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2622.467915] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2622.468079] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquired lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2622.468240] env[61868]: DEBUG nova.network.neutron [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2622.495453] env[61868]: DEBUG nova.network.neutron [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2622.518586] env[61868]: DEBUG nova.network.neutron [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2622.528557] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Releasing lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2622.528777] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2622.528953] env[61868]: DEBUG nova.compute.manager [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Skipping network deallocation for instance since networking was not requested. {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2622.622730] env[61868]: INFO nova.scheduler.client.report [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Deleted allocations for instance 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a [ 2622.644008] env[61868]: DEBUG oslo_concurrency.lockutils [None req-b90547ed-89dc-471b-aa2b-ccb4c83dabf8 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 243.162s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2622.644279] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 47.998s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2622.644496] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2622.644699] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2622.644863] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2622.646772] env[61868]: INFO nova.compute.manager [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Terminating instance [ 2622.648338] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquiring lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2622.648481] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Acquired lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2622.648651] env[61868]: DEBUG nova.network.neutron [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2622.677306] env[61868]: DEBUG nova.network.neutron [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2622.700746] env[61868]: DEBUG nova.network.neutron [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2622.708779] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Releasing lock "refresh_cache-43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2622.709191] env[61868]: DEBUG nova.compute.manager [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2622.709380] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2622.709885] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bc766b5-0800-4fab-8bf9-35a717106583 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.718675] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77137514-b0cd-4d44-bc68-9458bb78cb99 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2622.742802] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a could not be found. [ 2622.743013] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2622.743191] env[61868]: INFO nova.compute.manager [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2622.743433] env[61868]: DEBUG oslo.service.loopingcall [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2622.743696] env[61868]: DEBUG nova.compute.manager [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2622.743764] env[61868]: DEBUG nova.network.neutron [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2622.763059] env[61868]: DEBUG nova.network.neutron [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Instance cache missing network info. {{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2622.770361] env[61868]: DEBUG nova.network.neutron [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2622.778787] env[61868]: INFO nova.compute.manager [-] [instance: 43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a] Took 0.03 seconds to deallocate network for instance. 
[ 2622.872873] env[61868]: DEBUG oslo_concurrency.lockutils [None req-56093657-9633-4cd2-8d4c-23c75fda82b6 tempest-ServerShowV254Test-603101269 tempest-ServerShowV254Test-603101269-project-member] Lock "43ecdb6d-e5dc-43a4-bbd1-c016aa15f82a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.228s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2645.352118] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2646.351135] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2651.357663] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2651.358102] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2651.358102] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2651.370012] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2651.370201] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2651.370368] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2652.360045] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2654.352069] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2655.351578] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2655.351770] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2655.362851] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2655.362851] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2655.363148] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2655.363148] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2655.364340] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcefecf7-ae8d-43da-83a9-18d5b1161175 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.373459] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a355fc-1f3a-401d-b8a5-7e14d9e2ef34 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.387312] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3aca397-f699-4b17-8d96-eb0848fbb57a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.393533] env[61868]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9275eb-be92-49a8-af80-97fe65031660 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.422651] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181929MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2655.422858] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2655.422990] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2655.554862] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2655.555027] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b547b52d-bbb7-4211-b784-5fa278f852eb actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. 
{{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2655.555212] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2655.555359] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2655.570550] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing inventories for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2655.583060] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Updating ProviderTree inventory for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2655.583242] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Updating inventory in ProviderTree for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2655.593507] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing aggregate associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, aggregates: None {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2655.608957] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Refreshing trait associations for resource provider 6539a0d3-09f9-481f-a837-7ea10081c3cc, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61868) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2655.643656] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92a3a65-4a1c-4cb4-bcd7-2245bac68a06 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.651453] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1ff49ac6-57cc-409a-995a-2e0f68a489ab {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.682086] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29d0b2a-3f03-4f98-bca7-26cf3fe5c298 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.689347] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14818fa7-9674-431c-9cb3-cb473e36afb5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2655.702519] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2655.711338] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2655.728257] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2655.728456] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.305s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2657.728250] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2658.352272] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2659.351964] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2659.352390] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2659.352390] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2659.352552] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61868) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2660.359877] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2660.360312] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Cleaning up deleted instances {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2660.369586] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] There are 0 instances to clean {{(pid=61868) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2667.721720] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "b547b52d-bbb7-4211-b784-5fa278f852eb" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2671.103936] env[61868]: WARNING oslo_vmware.rw_handles [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2671.103936] env[61868]: ERROR oslo_vmware.rw_handles [ 2671.103936] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 
8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2671.106796] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2671.107206] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Copying Virtual Disk [datastore2] vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/0a0b9316-3199-4b54-bdfd-6e790a51bc57/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2671.107733] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d09c8988-0e41-4539-be44-2edea62c3418 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2671.120812] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2671.120812] env[61868]: value = "task-41243" [ 2671.120812] env[61868]: _type = "Task" [ 2671.120812] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2671.132994] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2671.631653] env[61868]: DEBUG oslo_vmware.exceptions [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Fault InvalidArgument not matched. 
{{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2671.632069] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2671.632705] env[61868]: ERROR nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2671.632705] env[61868]: Faults: ['InvalidArgument'] [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Traceback (most recent call last): [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] yield resources [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self.driver.spawn(context, instance, image_meta, [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self._fetch_image_if_missing(context, vi) [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] image_cache(vi, tmp_image_ds_loc) [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] vm_util.copy_virtual_disk( [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] session._wait_for_task(vmdk_copy_task) [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] return self.wait_for_task(task_ref) [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] return evt.wait() [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] result = hub.switch() [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] return self.greenlet.switch() [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self.f(*self.args, **self.kw) [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] raise exceptions.translate_fault(task_info.error) [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Faults: ['InvalidArgument'] [ 2671.632705] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] [ 2671.633766] env[61868]: INFO nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Terminating instance [ 2671.635284] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2671.635539] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2671.635828] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39d0b3b1-0cfe-4895-b98d-525a1eab8694 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2671.638451] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2671.638712] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2671.639578] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c646dac9-1e3f-4b97-aa07-b20cfd2c3d01 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2671.647875] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2671.648211] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1eff3c08-e78e-458c-8b2e-8748619a5c9f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2671.650713] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2671.650936] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2671.651989] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0272bfaa-52f3-4ecc-b24d-fb3568527771 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2671.657593] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2671.657593] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5225714b-a477-9d41-438d-a044f33235d6" [ 2671.657593] env[61868]: _type = "Task" [ 2671.657593] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2671.666510] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5225714b-a477-9d41-438d-a044f33235d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2671.722906] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2671.723211] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2671.723443] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleting the datastore file [datastore2] 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2671.723766] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3f17c9e-0100-4887-9de8-90f7cc04e42e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2671.730649] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for the task: (returnval){ [ 2671.730649] env[61868]: value = "task-41245" [ 2671.730649] env[61868]: _type = "Task" [ 2671.730649] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2671.739304] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41245, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2672.168643] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2672.169061] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating directory with path [datastore2] vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2672.169321] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72a1c431-0ab2-442c-9740-4eaccca2ee82 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.181474] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Created directory with path [datastore2] vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2672.181805] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Fetch image to [datastore2] vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2672.182080] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2672.182938] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f633c07f-80fa-469e-8f7c-582ec2a93491 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.190923] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54862e8c-a63e-4c22-a289-9c2350994c3c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.200457] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff93df13-c13f-45d1-84b3-dc3787360af9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.237764] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56a30e6-8a0c-44ff-b34a-1d9f50f45a89 
{{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.248214] env[61868]: DEBUG oslo_vmware.api [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Task: {'id': task-41245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079487} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2672.248840] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2672.249082] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2672.249300] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2672.250119] env[61868]: INFO nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2672.251711] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4aaf9173-5d82-47be-a91b-343c5edc7038 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.254476] env[61868]: DEBUG nova.compute.claims [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2672.254701] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2672.254957] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2672.279280] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2672.332217] env[61868]: DEBUG oslo_vmware.rw_handles [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2672.385212] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ff6043-d45a-49b8-8dc1-cd40bf42d706 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.389968] env[61868]: DEBUG oslo_vmware.rw_handles [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2672.390146] env[61868]: DEBUG oslo_vmware.rw_handles [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2672.394249] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab6bb9d-ddf2-43c5-8e9e-56142d48bd84 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.424878] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef3c2cb-fe71-4738-b8ea-34421594d595 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.432534] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c8d2b8-3b2c-4c7a-b6fe-73748bc80e0e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.445559] env[61868]: DEBUG nova.compute.provider_tree [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2672.454150] env[61868]: DEBUG nova.scheduler.client.report [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2672.473048] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.218s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2672.473640] env[61868]: ERROR nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2672.473640] env[61868]: Faults: ['InvalidArgument'] [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Traceback (most recent call last): [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self.driver.spawn(context, instance, image_meta, [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2672.473640] env[61868]: ERROR 
nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self._fetch_image_if_missing(context, vi) [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] image_cache(vi, tmp_image_ds_loc) [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] vm_util.copy_virtual_disk( [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] session._wait_for_task(vmdk_copy_task) [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] return self.wait_for_task(task_ref) [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] return evt.wait() [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] result = hub.switch() [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] return self.greenlet.switch() [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] self.f(*self.args, **self.kw) [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] raise exceptions.translate_fault(task_info.error) [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Faults: ['InvalidArgument'] [ 2672.473640] env[61868]: ERROR nova.compute.manager [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] [ 2672.474651] env[61868]: DEBUG nova.compute.utils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2672.475768] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Build of instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 was re-scheduled: A specified parameter was not correct: fileType [ 2672.475768] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2672.476163] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2672.476337] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2672.476505] env[61868]: DEBUG nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2672.476673] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2672.915027] env[61868]: DEBUG nova.network.neutron [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2672.926657] env[61868]: INFO nova.compute.manager [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Took 0.45 seconds to deallocate network for instance. 
[ 2673.027130] env[61868]: INFO nova.scheduler.client.report [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Deleted allocations for instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 [ 2673.055299] env[61868]: DEBUG oslo_concurrency.lockutils [None req-70e06b92-7067-4c1c-af88-9dde4c61474a tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 270.470s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2673.055600] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 75.554s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2673.055863] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Acquiring lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2673.056126] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2673.056329] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2673.058284] env[61868]: INFO nova.compute.manager [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Terminating instance [ 2673.060588] env[61868]: DEBUG nova.compute.manager [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Start destroying the instance on the hypervisor. 
{{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2673.060788] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2673.061259] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-42db83d5-0a93-498e-9f8c-19738a6d6cba {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2673.070777] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c925992b-052b-4491-894c-64e600af1d65 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2673.099091] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8b79cee2-64a5-45fa-b99d-3aea86d4dc91 could not be found. [ 2673.099338] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2673.099539] env[61868]: INFO nova.compute.manager [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2673.099790] env[61868]: DEBUG oslo.service.loopingcall [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2673.100522] env[61868]: DEBUG nova.compute.manager [-] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2673.100522] env[61868]: DEBUG nova.network.neutron [-] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2673.198105] env[61868]: DEBUG nova.network.neutron [-] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2673.208538] env[61868]: INFO nova.compute.manager [-] [instance: 8b79cee2-64a5-45fa-b99d-3aea86d4dc91] Took 0.11 seconds to deallocate network for instance. 
[ 2673.331247] env[61868]: DEBUG oslo_concurrency.lockutils [None req-1bcfa582-401d-456f-8528-9ca478f9b9b7 tempest-ImagesTestJSON-1042540315 tempest-ImagesTestJSON-1042540315-project-member] Lock "8b79cee2-64a5-45fa-b99d-3aea86d4dc91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.275s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2705.362216] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2713.346690] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2713.351290] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2713.351490] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2713.351619] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2713.363751] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2713.363929] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2714.351186] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2716.352058] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2716.362581] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2716.362962] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2716.363139] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2716.363344] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2716.364481] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386ee50d-b2e4-466b-ab46-798647167ad6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.374727] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248c2707-88d2-465d-9a82-d70db6fadde0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.388577] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5871380-f362-4ec1-943b-2449e11f9a23 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.394906] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf45b70-155e-4a13-9b2b-3a80d12afc31 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.423177] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181926MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2716.423341] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2716.423601] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2716.461450] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance b547b52d-bbb7-4211-b784-5fa278f852eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2716.461657] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2716.461797] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=640MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2716.488351] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe4cb49-271c-4d5d-a3c5-202bb7c484dd {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.495744] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab99b230-5e6f-4b87-9b85-5b9ca5ab4abc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.527616] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14a85b1-20a9-4cb1-8dc5-0108722c3aa9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.534840] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a11c5b6-95d0-4b5e-a5d2-b42e46cdd9a4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2716.547870] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2716.556188] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2716.572907] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2716.573094] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.150s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2717.567281] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2717.578827] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2718.350983] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2718.351253] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2719.351033] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2719.351386] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2721.118788] env[61868]: WARNING oslo_vmware.rw_handles [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2721.118788] env[61868]: ERROR oslo_vmware.rw_handles [ 2721.119495] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2721.122030] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2721.122261] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Copying Virtual Disk [datastore2] vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/5f3bcec7-be17-4f37-a2b6-3e3d200d5886/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2721.122582] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7619ab4-47dd-438d-903e-01c30ee9b716 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.130211] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: 
(returnval){ [ 2721.130211] env[61868]: value = "task-41246" [ 2721.130211] env[61868]: _type = "Task" [ 2721.130211] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2721.138265] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41246, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2721.640929] env[61868]: DEBUG oslo_vmware.exceptions [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2721.641269] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2721.642026] env[61868]: ERROR nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2721.642026] env[61868]: Faults: ['InvalidArgument'] [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Traceback (most recent call last): [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] yield resources [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self.driver.spawn(context, instance, image_meta, [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self._fetch_image_if_missing(context, vi) [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: 
b547b52d-bbb7-4211-b784-5fa278f852eb] image_cache(vi, tmp_image_ds_loc) [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] vm_util.copy_virtual_disk( [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] session._wait_for_task(vmdk_copy_task) [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] return self.wait_for_task(task_ref) [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] return evt.wait() [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] result = hub.switch() [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] return self.greenlet.switch() [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self.f(*self.args, **self.kw) [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] raise exceptions.translate_fault(task_info.error) [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Faults: ['InvalidArgument'] [ 2721.642026] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] [ 2721.643231] env[61868]: INFO nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Terminating instance [ 2721.645969] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f 
tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2721.646278] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2721.647333] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4347a52-f690-4a90-9e7f-0acf0c41f49a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.655630] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2721.655939] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83680290-c423-4abb-a6b0-b2e9c625b98f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.721316] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2721.721606] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2721.721830] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleting the datastore file [datastore2] b547b52d-bbb7-4211-b784-5fa278f852eb {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2721.722043] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6dfaeec3-6342-41a9-8827-48f4e408c8d0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.728863] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for the task: (returnval){ [ 2721.728863] env[61868]: value = "task-41248" [ 2721.728863] env[61868]: _type = "Task" [ 2721.728863] env[61868]: } to complete. 
{{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2721.736907] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2722.238308] env[61868]: DEBUG oslo_vmware.api [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Task: {'id': task-41248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109442} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2722.238702] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2722.238753] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2722.238885] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2722.239059] env[61868]: INFO nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 2722.241185] env[61868]: DEBUG nova.compute.claims [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2722.241357] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2722.241614] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2722.311675] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1b9887-d6cd-4840-9121-6973d8daede0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.319958] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c947a62-69ee-46c6-a789-4bd39ded6a10 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.351279] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b93208-2510-4951-991a-95934bbffaf7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.359390] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d89c2b4-1bef-43d9-9ffd-a8515c2eb36c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.372886] env[61868]: DEBUG nova.compute.provider_tree [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2722.381357] env[61868]: DEBUG nova.scheduler.client.report [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2722.397315] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 
tempest-DeleteServersTestJSON-1745478556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.156s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2722.397958] env[61868]: ERROR nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2722.397958] env[61868]: Faults: ['InvalidArgument'] [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Traceback (most recent call last): [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self.driver.spawn(context, instance, image_meta, [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self._fetch_image_if_missing(context, vi) [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] image_cache(vi, tmp_image_ds_loc) [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] vm_util.copy_virtual_disk( [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] session._wait_for_task(vmdk_copy_task) [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] return self.wait_for_task(task_ref) [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] return evt.wait() [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: 
b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] result = hub.switch() [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] return self.greenlet.switch() [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] self.f(*self.args, **self.kw) [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] raise exceptions.translate_fault(task_info.error) [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Faults: ['InvalidArgument'] [ 2722.397958] env[61868]: ERROR nova.compute.manager [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] [ 2722.399170] env[61868]: DEBUG nova.compute.utils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2722.400687] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Build of instance b547b52d-bbb7-4211-b784-5fa278f852eb was re-scheduled: A specified parameter was not correct: fileType [ 2722.400687] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2722.401159] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2722.401389] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2722.401664] env[61868]: DEBUG nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2722.401891] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2722.682119] env[61868]: DEBUG nova.network.neutron [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2722.696667] env[61868]: INFO nova.compute.manager [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Took 0.29 seconds to deallocate network for instance. [ 2722.792033] env[61868]: INFO nova.scheduler.client.report [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Deleted allocations for instance b547b52d-bbb7-4211-b784-5fa278f852eb [ 2722.812745] env[61868]: DEBUG oslo_concurrency.lockutils [None req-910d9db5-5da8-411b-a2a4-3a11e3bbc51f tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 250.855s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2722.813023] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 55.091s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2722.813244] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Acquiring lock "b547b52d-bbb7-4211-b784-5fa278f852eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2722.813475] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s
{{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2722.813648] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2722.815922] env[61868]: INFO nova.compute.manager [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Terminating instance [ 2722.817706] env[61868]: DEBUG nova.compute.manager [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2722.817934] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2722.818402] env[61868]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-687d6b78-5b61-409c-aac9-45e40d8c4aeb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.827614] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7beb80-24eb-486c-96ab-a335a7438694 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.852192] env[61868]: WARNING nova.virt.vmwareapi.vmops [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b547b52d-bbb7-4211-b784-5fa278f852eb could not be found. [ 2722.852438] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2722.852737] env[61868]: INFO nova.compute.manager [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2722.853006] env[61868]: DEBUG oslo.service.loopingcall [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2722.853266] env[61868]: DEBUG nova.compute.manager [-] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2722.853373] env[61868]: DEBUG nova.network.neutron [-] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2722.885409] env[61868]: DEBUG nova.network.neutron [-] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2722.894030] env[61868]: INFO nova.compute.manager [-] [instance: b547b52d-bbb7-4211-b784-5fa278f852eb] Took 0.04 seconds to deallocate network for instance. [ 2722.986985] env[61868]: DEBUG oslo_concurrency.lockutils [None req-ea975545-6237-41a4-9d3d-38076f0b79f2 tempest-DeleteServersTestJSON-1745478556 tempest-DeleteServersTestJSON-1745478556-project-member] Lock "b547b52d-bbb7-4211-b784-5fa278f852eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.174s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2728.791484] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquiring lock "7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2728.791839] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2728.805529] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Starting instance...
{{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2728.857419] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2728.857672] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2728.859120] env[61868]: INFO nova.compute.claims [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2728.935654] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7aa84d-1b60-4ef4-be4c-8eaeeb51ca33 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.944603] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389d0047-e7db-4c0f-b090-e66e7cb99324 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.978974] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea55cd1-4495-43ad-a4ec-4c343f7114c7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2728.987264] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c491964d-48e7-40fa-9693-6857e5dfb25c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2729.001947] env[61868]: DEBUG nova.compute.provider_tree [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2729.010929] env[61868]: DEBUG nova.scheduler.client.report [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2729.029758] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.172s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2729.030337] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2729.070075] env[61868]: DEBUG nova.compute.utils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2729.071338] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2729.071599] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2729.082954] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Start building block device mappings for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2729.119616] env[61868]: DEBUG nova.policy [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c06041409e46458b9b286de5dc398d21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '803cd9148e804f1abc4cbc0f51588036', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 2729.151099] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Start spawning the instance on the hypervisor. 
{{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2729.172330] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=<?>,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=<?>,size=50659328,status='active',tags=<?>,updated_at=2024-02-13T12:42:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2729.172574] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2729.172732] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2729.172907] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2729.173052] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Image pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2729.173197] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2729.173400] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2729.173557] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Build topologies for 1 vcpu(s) 1:1:1
{{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2729.173726] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2729.173916] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2729.174089] env[61868]: DEBUG nova.virt.hardware [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2729.174949] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f95711-1945-465f-be4f-989a76ec56db {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2729.183706] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192a54da-c9ed-421a-872f-b874d2cf541b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2729.373778] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Successfully created port: 44101a8d-2a25-45b1-813c-7071a7b7ef7f {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2729.899688] env[61868]: DEBUG nova.compute.manager [req-fe1e3ca4-9638-4c82-a225-ccfcfc500d7e req-aed0ac00-99a7-4c2a-889c-57a045759e28 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Received event network-vif-plugged-44101a8d-2a25-45b1-813c-7071a7b7ef7f {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2729.899999] env[61868]: DEBUG oslo_concurrency.lockutils [req-fe1e3ca4-9638-4c82-a225-ccfcfc500d7e req-aed0ac00-99a7-4c2a-889c-57a045759e28 service nova] Acquiring lock "7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2729.900137] env[61868]: DEBUG oslo_concurrency.lockutils [req-fe1e3ca4-9638-4c82-a225-ccfcfc500d7e req-aed0ac00-99a7-4c2a-889c-57a045759e28 service nova] Lock "7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2729.900307] env[61868]: DEBUG oslo_concurrency.lockutils [req-fe1e3ca4-9638-4c82-a225-ccfcfc500d7e req-aed0ac00-99a7-4c2a-889c-57a045759e28 service nova] Lock "7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1-events" "released" by
"nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2729.900508] env[61868]: DEBUG nova.compute.manager [req-fe1e3ca4-9638-4c82-a225-ccfcfc500d7e req-aed0ac00-99a7-4c2a-889c-57a045759e28 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] No waiting events found dispatching network-vif-plugged-44101a8d-2a25-45b1-813c-7071a7b7ef7f {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2729.900708] env[61868]: WARNING nova.compute.manager [req-fe1e3ca4-9638-4c82-a225-ccfcfc500d7e req-aed0ac00-99a7-4c2a-889c-57a045759e28 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Received unexpected event network-vif-plugged-44101a8d-2a25-45b1-813c-7071a7b7ef7f for instance with vm_state building and task_state spawning. [ 2730.163806] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Successfully updated port: 44101a8d-2a25-45b1-813c-7071a7b7ef7f {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2730.174862] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquiring lock "refresh_cache-7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2730.175017] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquired lock "refresh_cache-7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2730.175184] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2730.212607] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Instance cache missing network info.
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2730.396569] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Updating instance_info_cache with network_info: [{"id": "44101a8d-2a25-45b1-813c-7071a7b7ef7f", "address": "fa:16:3e:ff:57:2d", "network": {"id": "c19ed6c7-d452-46a4-b00c-abe9784af969", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-795088329-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "803cd9148e804f1abc4cbc0f51588036", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae215ba8-f7a5-4b23-a055-90316d29817f", "external-id": "nsx-vlan-transportzone-798", "segmentation_id": 798, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44101a8d-2a", "ovs_interfaceid": "44101a8d-2a25-45b1-813c-7071a7b7ef7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2730.413161] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Releasing lock "refresh_cache-7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2730.413397] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Instance network_info: |[{"id": "44101a8d-2a25-45b1-813c-7071a7b7ef7f", "address": "fa:16:3e:ff:57:2d", "network": {"id": "c19ed6c7-d452-46a4-b00c-abe9784af969", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-795088329-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "803cd9148e804f1abc4cbc0f51588036", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae215ba8-f7a5-4b23-a055-90316d29817f", "external-id": "nsx-vlan-transportzone-798", "segmentation_id": 798, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44101a8d-2a", "ovs_interfaceid": "44101a8d-2a25-45b1-813c-7071a7b7ef7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2730.414135] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:57:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae215ba8-f7a5-4b23-a055-90316d29817f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44101a8d-2a25-45b1-813c-7071a7b7ef7f', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2730.422397] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Creating folder: Project (803cd9148e804f1abc4cbc0f51588036). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2730.423591] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba34bc6e-9963-4607-a7e2-f3ea046024c1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2730.437434] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Created folder: Project (803cd9148e804f1abc4cbc0f51588036) in parent group-v18181. [ 2730.437635] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Creating folder: Instances. Parent ref: group-v18315. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2730.437818] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c7d4467-b517-4ddc-951a-e3ccf2b18eb5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2730.447516] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Created folder: Instances in parent group-v18315. [ 2730.447773] env[61868]: DEBUG oslo.service.loopingcall [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2730.447968] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2730.448215] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f73f26b-fc94-489e-96bc-23b9abd3596d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2730.470081] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2730.470081] env[61868]: value = "task-41251" [ 2730.470081] env[61868]: _type = "Task" [ 2730.470081] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2730.478049] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41251, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2730.980399] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41251, 'name': CreateVM_Task, 'duration_secs': 0.317523} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2730.980765] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2730.987824] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2730.988077] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2730.990895] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acb87e1-5da0-48a0-b318-75adab290b0f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2731.016957] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2731.017313] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a0d4ca7-18f5-4a33-afeb-9764b74f2816 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2731.033040] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 
tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Waiting for the task: (returnval){ [ 2731.033040] env[61868]: value = "task-41252" [ 2731.033040] env[61868]: _type = "Task" [ 2731.033040] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2731.040750] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Task: {'id': task-41252, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2731.543158] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Task: {'id': task-41252, 'name': ReconfigVM_Task, 'duration_secs': 0.111036} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2731.543452] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2731.543665] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.556s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2731.544032] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2731.544244] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2731.544586] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2731.544851] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b8905fa-45ec-4a24-a89c-258b4f29b7a8 {{(pid=61868) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2731.549450] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Waiting for the task: (returnval){ [ 2731.549450] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d1bbbf-fb93-7cab-b6e5-4552ab47b064" [ 2731.549450] env[61868]: _type = "Task" [ 2731.549450] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2731.557633] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52d1bbbf-fb93-7cab-b6e5-4552ab47b064, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2732.059633] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2732.060044] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2732.060135] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2732.060285] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2732.060556] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2732.060770] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12ff5580-7326-46bd-ad8b-543933295f27 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.068821] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2732.068997] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2732.069761] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2fa8bf6-857f-4184-a040-d54b1c8a1875 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.074826] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Waiting for the task: (returnval){ [ 2732.074826] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]52290e6f-0174-102c-72b5-7691730eafe7" [ 2732.074826] env[61868]: _type = "Task" [ 2732.074826] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2732.082556] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]52290e6f-0174-102c-72b5-7691730eafe7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2732.106988] env[61868]: DEBUG nova.compute.manager [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Received event network-changed-44101a8d-2a25-45b1-813c-7071a7b7ef7f {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2732.107153] env[61868]: DEBUG nova.compute.manager [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Refreshing instance network info cache due to event network-changed-44101a8d-2a25-45b1-813c-7071a7b7ef7f. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2732.107360] env[61868]: DEBUG oslo_concurrency.lockutils [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] Acquiring lock "refresh_cache-7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2732.107551] env[61868]: DEBUG oslo_concurrency.lockutils [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] Acquired lock "refresh_cache-7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2732.107736] env[61868]: DEBUG nova.network.neutron [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Refreshing network info cache for port 44101a8d-2a25-45b1-813c-7071a7b7ef7f {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2732.323880] env[61868]: DEBUG nova.network.neutron [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Updated VIF entry in instance network info cache for port 44101a8d-2a25-45b1-813c-7071a7b7ef7f. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2732.324380] env[61868]: DEBUG nova.network.neutron [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Updating instance_info_cache with network_info: [{"id": "44101a8d-2a25-45b1-813c-7071a7b7ef7f", "address": "fa:16:3e:ff:57:2d", "network": {"id": "c19ed6c7-d452-46a4-b00c-abe9784af969", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-795088329-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "803cd9148e804f1abc4cbc0f51588036", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae215ba8-f7a5-4b23-a055-90316d29817f", "external-id": "nsx-vlan-transportzone-798", "segmentation_id": 798, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44101a8d-2a", "ovs_interfaceid": "44101a8d-2a25-45b1-813c-7071a7b7ef7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2732.336072] env[61868]: DEBUG oslo_concurrency.lockutils [req-b598df5c-efc2-4689-a125-b6916fe818df req-104db092-79a6-41aa-b4d2-dcd30869bad4 service nova] Releasing lock "refresh_cache-7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2732.585457] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 
7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2732.585713] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Creating directory with path [datastore2] vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2732.585957] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5043525e-cd9c-4b26-9a59-f20052b337f6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.598966] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Created directory with path [datastore2] vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2732.599175] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Fetch image to [datastore2] vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2732.599348] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2732.600340] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e291d5-5950-4cb7-bce8-1776febf571a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.610430] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3c2c2b-026b-468e-87b5-28000ab7a7b1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.620253] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7508376b-49a8-43ac-983a-30d5c3fce829 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.652473] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22b3969-7558-4f13-8856-85185a614fb5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.659205] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with 
opID=oslo.vmware-eb57f559-9a75-4f97-91eb-57cf76e96066 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2732.683795] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2732.733201] env[61868]: DEBUG oslo_vmware.rw_handles [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2732.789642] env[61868]: DEBUG oslo_vmware.rw_handles [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2732.789833] env[61868]: DEBUG oslo_vmware.rw_handles [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2767.352688] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2773.347458] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2774.351045] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2774.351409] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2774.351409] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2774.362144] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Skipping network cache update for instance because it is Building. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2774.362350] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2776.351882] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2777.351808] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2777.361914] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2777.362269] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2777.362327] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2777.362451] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2777.363593] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2ae990-b555-4b2c-b0b8-3f6544dbbcf0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.372735] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810fd3d7-c31e-40b4-a869-970fee69ef85 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.388330] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c300e17c-5054-4392-85cc-19d24c655fb2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.394866] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb31eaa5-09cf-4eee-85b2-ad190c2952a9 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.424345] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181935MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2777.424513] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2777.424758] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2777.462404] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2777.462607] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2777.462752] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=640MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2777.490045] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d645c8-16e0-4d14-872c-1fcf9bc7395e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.498138] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa27226c-acd2-4f0f-9a0b-1a2686970a69 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.529854] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd0c93b-df94-406b-8920-7d3daa7d9c69 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.537230] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41aba2fa-c685-4d07-8d7a-ff9af6afd4cb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2777.550836] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2777.560183] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2777.577596] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2777.577793] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.153s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2779.578145] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2779.578534] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2779.578534] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.352078] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.352287] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}} [ 2780.476694] env[61868]: WARNING oslo_vmware.rw_handles [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2780.476694] env[61868]: ERROR oslo_vmware.rw_handles [ 2780.477248] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2780.479165] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2780.479446] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Copying Virtual Disk [datastore2] vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/1ac894bd-dbf8-4ea5-bd81-27a0716c2d38/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2780.479746] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9117fba-3f08-40e9-945f-d345f1ada4c8 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2780.488374] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 
tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Waiting for the task: (returnval){ [ 2780.488374] env[61868]: value = "task-41253" [ 2780.488374] env[61868]: _type = "Task" [ 2780.488374] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2780.496191] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Task: {'id': task-41253, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2780.999287] env[61868]: DEBUG oslo_vmware.exceptions [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2780.999682] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2781.000117] env[61868]: ERROR nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2781.000117] env[61868]: Faults: ['InvalidArgument'] [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Traceback (most recent call last): [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] yield resources [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self.driver.spawn(context, instance, image_meta, [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self._fetch_image_if_missing(context, vi) [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 
7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] image_cache(vi, tmp_image_ds_loc) [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] vm_util.copy_virtual_disk( [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] session._wait_for_task(vmdk_copy_task) [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] return self.wait_for_task(task_ref) [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] return evt.wait() [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] result = hub.switch() [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] return self.greenlet.switch() [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self.f(*self.args, **self.kw) [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] raise exceptions.translate_fault(task_info.error) [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Faults: ['InvalidArgument'] [ 2781.000117] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] [ 2781.001891] env[61868]: INFO nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 
tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Terminating instance [ 2781.003307] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2781.003498] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2781.004269] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89bf77f0-3ac8-4dfa-937b-0961d4ffb392 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.011278] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2781.011541] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8561b9a8-6fab-4f4b-8565-720f5b823823 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.076457] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2781.076762] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2781.076952] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Deleting the datastore file [datastore2] 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2781.077226] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15743d7a-5118-4711-8dc4-e6809fe8ffc5 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.084234] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Waiting for the task: (returnval){ [ 2781.084234] env[61868]: value = "task-41255" [ 
2781.084234] env[61868]: _type = "Task" [ 2781.084234] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2781.092157] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Task: {'id': task-41255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2781.594922] env[61868]: DEBUG oslo_vmware.api [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Task: {'id': task-41255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06831} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2781.595205] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2781.595361] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2781.595527] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2781.595701] env[61868]: INFO nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 2781.597803] env[61868]: DEBUG nova.compute.claims [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2781.597975] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2781.598186] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2781.667807] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7049f92d-3918-4651-95c1-992521c5e592 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.675824] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97c4127-f914-4a83-bbab-0474767f4282 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2782.470246] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f07370a-905f-4f12-ab21-0d56629699d1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2782.478119] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db80d42d-8948-485d-ad6c-8c3264c0bd0e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2782.492079] env[61868]: DEBUG nova.compute.provider_tree [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2782.500419] env[61868]: DEBUG nova.scheduler.client.report [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2782.516097] env[61868]: DEBUG 
oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.918s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2782.516659] env[61868]: ERROR nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2782.516659] env[61868]: Faults: ['InvalidArgument'] [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Traceback (most recent call last): [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self.driver.spawn(context, instance, image_meta, [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self._fetch_image_if_missing(context, vi) [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] image_cache(vi, tmp_image_ds_loc) [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] vm_util.copy_virtual_disk( [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] session._wait_for_task(vmdk_copy_task) [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] return self.wait_for_task(task_ref) [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2782.516659] env[61868]: 
ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] return evt.wait() [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] result = hub.switch() [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] return self.greenlet.switch() [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] self.f(*self.args, **self.kw) [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] raise exceptions.translate_fault(task_info.error) [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Faults: ['InvalidArgument'] [ 2782.516659] env[61868]: ERROR nova.compute.manager [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] [ 2782.517537] env[61868]: DEBUG nova.compute.utils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2782.518817] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Build of instance 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1 was re-scheduled: A specified parameter was not correct: fileType [ 2782.518817] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2782.519211] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2782.519419] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2782.519593] env[61868]: DEBUG nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2782.519755] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2782.795889] env[61868]: DEBUG nova.network.neutron [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2782.811218] env[61868]: INFO nova.compute.manager [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] [instance: 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1] Took 0.29 seconds to deallocate network for instance. [ 2782.910353] env[61868]: INFO nova.scheduler.client.report [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Deleted allocations for instance 7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1 [ 2782.932892] env[61868]: DEBUG oslo_concurrency.lockutils [None req-57094ec4-d981-4fde-8f9b-067a5fc3af99 tempest-ServerAddressesNegativeTestJSON-1197890054 tempest-ServerAddressesNegativeTestJSON-1197890054-project-member] Lock "7f2d6c4b-57ba-4b3e-a1ac-b07044d1cda1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.141s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2790.528614] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquiring lock "1c53c446-6686-4c95-b18e-5cafd08883c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2790.528996] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "1c53c446-6686-4c95-b18e-5cafd08883c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2790.541229] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 
tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Starting instance... {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2790.595749] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2790.596045] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2790.597573] env[61868]: INFO nova.compute.claims [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2790.670714] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eadff7-8dea-4ad7-9e58-d02063c119fe {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2790.679534] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a83bae7-142b-4d08-90f3-c90cc686d66d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2790.710235] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266a3ba3-9e64-4428-be20-bb31e2f792c0 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2790.719093] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb32d34-9c65-4a5a-896d-5c913beb0aa2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2790.734123] env[61868]: DEBUG nova.compute.provider_tree [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2790.744626] env[61868]: DEBUG nova.scheduler.client.report [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2790.760969] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.165s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2790.761547] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Start building networks asynchronously for instance. {{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2790.821712] env[61868]: DEBUG nova.compute.utils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Using /dev/sd instead of None {{(pid=61868) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2790.823295] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Allocating IP information in the background. {{(pid=61868) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2790.823483] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] allocate_for_instance() {{(pid=61868) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2790.836621] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Start building block device mappings for instance. 
{{(pid=61868) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2790.870928] env[61868]: DEBUG nova.policy [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eabc38e95dd428dba94dd70eb3c4012', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c02b232bae448b5920abadaabb68c57', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61868) authorize /opt/stack/nova/nova/policy.py:203}} [ 2790.909178] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Start spawning the instance on the hypervisor. {{(pid=61868) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2790.935136] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-02-13T12:43:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9cb04cab48cfe256ad81b0c491c612c4',container_format='bare',created_at=2024-02-13T12:42:43Z,direct_url=,disk_format='vmdk',id=790b1826-10c3-4b26-ad5d-ce8b36354025,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-ide',owner='9c5faf0f361a40beaed624347065d69c',properties=ImageMetaProps,protected=,size=50659328,status='active',tags=,updated_at=2024-02-13T12:42:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2790.935389] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Flavor limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2790.935750] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Image limits 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2790.936129] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Flavor pref 0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2790.936307] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Image pref 
0:0:0 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2790.936459] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61868) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2790.936674] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2790.936832] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2790.936997] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Got 1 possible topologies {{(pid=61868) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2790.937156] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2790.937326] env[61868]: DEBUG nova.virt.hardware [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61868) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2790.938190] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15b6784-0c83-4549-baa0-51746d7d2f4c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2790.947830] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992ac8e2-31ce-4ea6-80ea-c153f093f5c7 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2791.144047] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Successfully created port: 5d3e834b-13bf-473b-bb7e-d3c0b800f1fb {{(pid=61868) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2791.795148] env[61868]: DEBUG nova.compute.manager [req-468408b3-073c-4881-96fd-81fd9b989bb3 req-b26467f1-890b-4735-a1bf-c8ef8b331c2c service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Received event 
network-vif-plugged-5d3e834b-13bf-473b-bb7e-d3c0b800f1fb {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2791.795396] env[61868]: DEBUG oslo_concurrency.lockutils [req-468408b3-073c-4881-96fd-81fd9b989bb3 req-b26467f1-890b-4735-a1bf-c8ef8b331c2c service nova] Acquiring lock "1c53c446-6686-4c95-b18e-5cafd08883c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2791.795549] env[61868]: DEBUG oslo_concurrency.lockutils [req-468408b3-073c-4881-96fd-81fd9b989bb3 req-b26467f1-890b-4735-a1bf-c8ef8b331c2c service nova] Lock "1c53c446-6686-4c95-b18e-5cafd08883c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2791.795726] env[61868]: DEBUG oslo_concurrency.lockutils [req-468408b3-073c-4881-96fd-81fd9b989bb3 req-b26467f1-890b-4735-a1bf-c8ef8b331c2c service nova] Lock "1c53c446-6686-4c95-b18e-5cafd08883c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2791.795900] env[61868]: DEBUG nova.compute.manager [req-468408b3-073c-4881-96fd-81fd9b989bb3 req-b26467f1-890b-4735-a1bf-c8ef8b331c2c service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] No waiting events found dispatching network-vif-plugged-5d3e834b-13bf-473b-bb7e-d3c0b800f1fb {{(pid=61868) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2791.796082] env[61868]: WARNING nova.compute.manager [req-468408b3-073c-4881-96fd-81fd9b989bb3 req-b26467f1-890b-4735-a1bf-c8ef8b331c2c service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Received unexpected event network-vif-plugged-5d3e834b-13bf-473b-bb7e-d3c0b800f1fb for instance with vm_state building and task_state spawning.
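The WARNING above is an ordering race rather than a failure here: Neutron delivered network-vif-plugged-5d3e834b-13bf-473b-bb7e-d3c0b800f1fb before any waiter had been registered for the instance, so pop_instance_event found no waiting events and the event was discarded with a warning while the instance was still in vm_state building / task_state spawning (the build continues normally in the entries that follow). The sketch below illustrates the register-then-pop pattern implied by the "1c53c446-...-events" lock acquire/release pairs; it is a hedged, self-contained approximation with illustrative names, not Nova's actual InstanceEvents implementation.

    import threading

    class InstanceEvents:
        # Minimal sketch: an event registry guarded by a lock, mirroring
        # the "<uuid>-events" lock acquire/release pairs in the log above.
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # A driver that plugs VIFs on the host would register interest
            # *before* plugging, then block on the returned Event.
            with self._lock:
                ev = threading.Event()
                self._waiters[(instance_uuid, event_name)] = ev
                return ev

        def pop(self, instance_uuid, event_name):
            # Called when an external event arrives; returns None if no
            # waiter was registered, which corresponds to the "No waiting
            # events found ... Received unexpected event" path seen above.
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    registry = InstanceEvents()
    # No waiter was registered for the port event -> None -> logged as unexpected.
    assert registry.pop(
        "1c53c446-6686-4c95-b18e-5cafd08883c7",
        "network-vif-plugged-5d3e834b-13bf-473b-bb7e-d3c0b800f1fb") is None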
[ 2791.895151] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Successfully updated port: 5d3e834b-13bf-473b-bb7e-d3c0b800f1fb {{(pid=61868) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2791.910328] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquiring lock "refresh_cache-1c53c446-6686-4c95-b18e-5cafd08883c7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2791.910503] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquired lock "refresh_cache-1c53c446-6686-4c95-b18e-5cafd08883c7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2791.910656] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Building network info cache for instance {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2791.948880] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Instance cache missing network info. 
{{(pid=61868) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2792.103673] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Updating instance_info_cache with network_info: [{"id": "5d3e834b-13bf-473b-bb7e-d3c0b800f1fb", "address": "fa:16:3e:5b:4a:91", "network": {"id": "f46f3825-11fa-4d74-9647-db9b347ff288", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-639340792-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6c02b232bae448b5920abadaabb68c57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d3e834b-13", "ovs_interfaceid": "5d3e834b-13bf-473b-bb7e-d3c0b800f1fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2792.117520] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Releasing lock "refresh_cache-1c53c446-6686-4c95-b18e-5cafd08883c7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2792.117895] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Instance network_info: |[{"id": "5d3e834b-13bf-473b-bb7e-d3c0b800f1fb", "address": "fa:16:3e:5b:4a:91", "network": {"id": "f46f3825-11fa-4d74-9647-db9b347ff288", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-639340792-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6c02b232bae448b5920abadaabb68c57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d3e834b-13", "ovs_interfaceid": "5d3e834b-13bf-473b-bb7e-d3c0b800f1fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61868) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2792.118344] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:4a:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d3e834b-13bf-473b-bb7e-d3c0b800f1fb', 'vif_model': 'e1000'}] {{(pid=61868) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2792.126336] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Creating folder: Project (6c02b232bae448b5920abadaabb68c57). Parent ref: group-v18181. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2792.126941] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19172761-04bb-49ef-b76b-9e5d8ee444f3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2792.137879] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Created folder: Project (6c02b232bae448b5920abadaabb68c57) in parent group-v18181. [ 2792.138074] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Creating folder: Instances. Parent ref: group-v18318. {{(pid=61868) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2792.138319] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91a5ac0c-4d04-4602-85f3-411fe5a80cff {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2792.148771] env[61868]: INFO nova.virt.vmwareapi.vm_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Created folder: Instances in parent group-v18318. [ 2792.149018] env[61868]: DEBUG oslo.service.loopingcall [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61868) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2792.149219] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Creating VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2792.149471] env[61868]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-561f20ea-9b86-4915-8de0-b93c3f7e807d {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2792.169046] env[61868]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2792.169046] env[61868]: value = "task-41258" [ 2792.169046] env[61868]: _type = "Task" [ 2792.169046] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2792.176725] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41258, 'name': CreateVM_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2792.679176] env[61868]: DEBUG oslo_vmware.api [-] Task: {'id': task-41258, 'name': CreateVM_Task, 'duration_secs': 0.314375} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2792.679465] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Created VM on the ESX host {{(pid=61868) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2792.679964] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquiring lock "vmware.get_and_set_vnc_port" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2792.680298] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "vmware.get_and_set_vnc_port" acquired by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2792.683126] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1dcb5bf-7823-483e-97d8-935324c9e716 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2792.711304] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Reconfiguring VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1815}} [ 2792.711670] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11978f65-42a4-4df8-ae4b-cc1ab1ca2fe1 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2792.727006] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c 
tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Waiting for the task: (returnval){ [ 2792.727006] env[61868]: value = "task-41259" [ 2792.727006] env[61868]: _type = "Task" [ 2792.727006] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2792.734999] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Task: {'id': task-41259, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2793.237805] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Task: {'id': task-41259, 'name': ReconfigVM_Task, 'duration_secs': 0.1087} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2793.238256] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Reconfigured VM instance to enable vnc on port - 5900 {{(pid=61868) _get_and_set_vnc_config /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1819}} [ 2793.238329] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "vmware.get_and_set_vnc_port" "released" by "nova.virt.vmwareapi.vmops.VMwareVMOps._get_and_set_vnc_config" :: held 0.558s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2793.238519] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2793.238666] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2793.238990] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2793.239248] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-205888aa-5705-4961-8a39-ecc9a2012fcc {{(pid=61868) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.244164] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Waiting for the task: (returnval){ [ 2793.244164] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]5249f662-6505-e40a-47a1-31984b0d8cbc" [ 2793.244164] env[61868]: _type = "Task" [ 2793.244164] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2793.252732] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]5249f662-6505-e40a-47a1-31984b0d8cbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2793.754642] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2793.754908] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Processing image 790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2793.755143] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2793.755293] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2793.755475] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2793.755774] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cf811d5-71de-456c-aabc-8a2ea0252e85 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.773676] 
env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2793.773841] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61868) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2793.774610] env[61868]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f098cb27-f14c-4fca-aa79-635542ed1272 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.779898] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Waiting for the task: (returnval){ [ 2793.779898] env[61868]: value = "session[523e2a88-3d56-7540-5a68-121a3b0ef814]529e11e5-15b9-19a5-ed8e-b2789692f4d9" [ 2793.779898] env[61868]: _type = "Task" [ 2793.779898] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2793.788881] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Task: {'id': session[523e2a88-3d56-7540-5a68-121a3b0ef814]529e11e5-15b9-19a5-ed8e-b2789692f4d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2793.818940] env[61868]: DEBUG nova.compute.manager [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Received event network-changed-5d3e834b-13bf-473b-bb7e-d3c0b800f1fb {{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2793.819133] env[61868]: DEBUG nova.compute.manager [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Refreshing instance network info cache due to event network-changed-5d3e834b-13bf-473b-bb7e-d3c0b800f1fb. 
{{(pid=61868) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2793.819457] env[61868]: DEBUG oslo_concurrency.lockutils [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] Acquiring lock "refresh_cache-1c53c446-6686-4c95-b18e-5cafd08883c7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2793.819715] env[61868]: DEBUG oslo_concurrency.lockutils [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] Acquired lock "refresh_cache-1c53c446-6686-4c95-b18e-5cafd08883c7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2793.819899] env[61868]: DEBUG nova.network.neutron [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Refreshing network info cache for port 5d3e834b-13bf-473b-bb7e-d3c0b800f1fb {{(pid=61868) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2794.037947] env[61868]: DEBUG nova.network.neutron [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Updated VIF entry in instance network info cache for port 5d3e834b-13bf-473b-bb7e-d3c0b800f1fb. {{(pid=61868) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2794.038354] env[61868]: DEBUG nova.network.neutron [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Updating instance_info_cache with network_info: [{"id": "5d3e834b-13bf-473b-bb7e-d3c0b800f1fb", "address": "fa:16:3e:5b:4a:91", "network": {"id": "f46f3825-11fa-4d74-9647-db9b347ff288", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-639340792-network", "subnets": [{"cidr": "10.0.0.0/28", "dns": [], "gateway": {"address": "10.0.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.0.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"dhcp_server": "10.0.0.2"}}], "meta": {"injected": false, "tenant_id": "6c02b232bae448b5920abadaabb68c57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d3e834b-13", "ovs_interfaceid": "5d3e834b-13bf-473b-bb7e-d3c0b800f1fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2794.048931] env[61868]: DEBUG oslo_concurrency.lockutils [req-f5bbe049-713d-4ced-8e3e-7fec7efa10d5 req-f32ed56a-ca15-45f6-9ab2-dacfbec74417 service nova] Releasing lock "refresh_cache-1c53c446-6686-4c95-b18e-5cafd08883c7" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2794.290194] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 
1c53c446-6686-4c95-b18e-5cafd08883c7] Preparing fetch location {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2794.290561] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Creating directory with path [datastore2] vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2794.290621] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d26d32e5-29f2-4f91-8a32-7847ce7256e3 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2794.311195] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Created directory with path [datastore2] vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025 {{(pid=61868) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2794.311411] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Fetch image to [datastore2] vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2794.311601] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to [datastore2] vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2794.312484] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3112eb0-2b7b-4542-a4ba-dd6858a8429f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2794.319834] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b68558-769a-476b-9443-5f89121585d6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2794.329276] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50daa10-8ed6-4208-8769-9a679af9ac33 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2794.360215] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41cdcc9-9cc6-4022-8542-af6ef11524dc {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2794.366427] env[61868]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with 
opID=oslo.vmware-d03e7092-6315-479a-a819-58078788cb2a {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2794.389183] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Downloading image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2794.437053] env[61868]: DEBUG oslo_vmware.rw_handles [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Creating HTTP connection to write to file with size = 50659328 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2794.492604] env[61868]: DEBUG oslo_vmware.rw_handles [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Completed reading data from the image iterator. {{(pid=61868) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2794.492803] env[61868]: DEBUG oslo_vmware.rw_handles [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61868) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2827.351889] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2834.353672] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2834.354157] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Starting heal instance info cache {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2834.354157] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Rebuilding the list of instances to heal {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9936}} [ 2834.365151] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Skipping network cache update for instance because it is Building. 
{{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 2834.365338] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Didn't find any instances for network info cache update. {{(pid=61868) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10018}} [ 2835.358426] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2836.352017] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2837.351388] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2837.364032] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2837.364032] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2837.364032] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2837.364032] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61868) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2837.364524] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a117f376-9954-49b1-8325-a5e9caacbf2c {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.373929] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb26e26-17c7-44d6-b487-94d5184cd0bb {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.387933] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fdeee8-8fdf-466e-96c7-85ec847d5a51 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.394428] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ca9b2cf7-dd6e-4522-ad66-c7ee9cec5164 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.422731] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181934MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61868) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2837.422889] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2837.423058] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2837.462935] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Instance 1c53c446-6686-4c95-b18e-5cafd08883c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61868) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2837.463126] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2837.463271] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=640MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61868) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2837.492289] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be044c9-6e7b-409b-9818-943b32ede0de {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.499836] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e18bdf-b97d-4df7-a82c-cfdd024097a4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.530107] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ad416e-8a83-4ba7-a1be-093c310868b6 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.537437] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95e3b23-8f8e-459f-a889-cc6e2e79f182 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2837.550099] env[61868]: DEBUG nova.compute.provider_tree [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed in ProviderTree for 
provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2837.558303] env[61868]: DEBUG nova.scheduler.client.report [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2837.575126] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2837.575310] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.152s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2839.576061] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2840.347601] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2841.351182] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2841.351619] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2841.410567] env[61868]: WARNING oslo_vmware.rw_handles [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles response.begin() [ 2841.410567] 
[ 2837.575126] env[61868]: DEBUG nova.compute.resource_tracker [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61868) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2837.575310] env[61868]: DEBUG oslo_concurrency.lockutils [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.152s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2839.576061] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2840.347601] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2841.351182] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2841.351619] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2841.410567] env[61868]: WARNING oslo_vmware.rw_handles [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles response.begin()
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2841.410567] env[61868]: ERROR oslo_vmware.rw_handles
[ 2841.411094] env[61868]: DEBUG nova.virt.vmwareapi.images [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Downloaded image file data 790b1826-10c3-4b26-ad5d-ce8b36354025 to vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk on the data store datastore2 {{(pid=61868) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
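The WARNING above is benign here: the image bytes had already been streamed to the datastore, and the exception fires only when rw_handles calls getresponse() during close and the far end has already hung up, as the "Downloaded image file data" line that follows confirms. A hedged sketch of that close-time pattern using plain http.client, not the oslo.vmware handle class itself:

```python
import http.client

# Illustrative close() handling: read the server's final response if it
# sends one, but tolerate the server dropping the connection first.
def close_transfer(conn: http.client.HTTPConnection) -> None:
    try:
        conn.getresponse()  # server may have closed the socket already
    except http.client.RemoteDisconnected as exc:
        # The payload was transferred before close; log and carry on,
        # matching the WARNING/Downloaded pair in the log above.
        print(f"Error occurred while reading the HTTP response.: {exc}")
    finally:
        conn.close()
```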
[ 2841.413040] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Caching image {{(pid=61868) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2841.413285] env[61868]: DEBUG nova.virt.vmwareapi.vm_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Copying Virtual Disk [datastore2] vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025/tmp-sparse.vmdk to [datastore2] vmware_temp/89b5d070-0838-4610-af0d-1b86677c4a70/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk {{(pid=61868) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2841.413566] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3964d32f-32eb-4d52-ae3c-5fe2a9e764b4 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2841.421787] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Waiting for the task: (returnval){
[ 2841.421787] env[61868]: value = "task-41260"
[ 2841.421787] env[61868]: _type = "Task"
[ 2841.421787] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2841.429689] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Task: {'id': task-41260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
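task-41260 is now polled until it leaves the queued/running states; on error, oslo.vmware translates the vCenter fault into a Python exception, which is exactly what surfaces a few lines below. A schematic stand-in for that wait loop, with get_task_info standing in for the real TaskInfo read over the vSphere API (both names here are ours):

```python
import time

class TaskFailed(Exception):
    """Stand-in for the translated-fault exception raised on task error."""

def wait_for_task(get_task_info, poll_interval: float = 0.5):
    # get_task_info() -> dict like {'state': 'running', 'progress': 0}
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise TaskFailed(info.get('error'))
        # 'queued'/'running': report progress and poll again, as the
        # "progress is 0%." lines above do.
        print(f"progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)
```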
[ 2841.931994] env[61868]: DEBUG oslo_vmware.exceptions [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Fault InvalidArgument not matched. {{(pid=61868) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2841.932292] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/790b1826-10c3-4b26-ad5d-ce8b36354025/790b1826-10c3-4b26-ad5d-ce8b36354025.vmdk" {{(pid=61868) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 2841.932911] env[61868]: ERROR nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2841.932911] env[61868]: Faults: ['InvalidArgument']
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Traceback (most recent call last):
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] yield resources
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self.driver.spawn(context, instance, image_meta,
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self._fetch_image_if_missing(context, vi)
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] image_cache(vi, tmp_image_ds_loc)
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] vm_util.copy_virtual_disk(
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] session._wait_for_task(vmdk_copy_task)
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] return self.wait_for_task(task_ref)
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] return evt.wait()
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] result = hub.switch()
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] return self.greenlet.switch()
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self.f(*self.args, **self.kw)
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] raise exceptions.translate_fault(task_info.error)
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Faults: ['InvalidArgument']
[ 2841.932911] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7]
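The "Fault InvalidArgument not matched." line just before this traceback explains the exception type seen in it: oslo.vmware looks the vCenter fault name up in a registry of specific exception classes and, finding no entry for InvalidArgument, falls back to a generic VimFaultException carrying the fault list. A schematic of that lookup; the registry contents and helper signature here are illustrative, not the library's actual code.

```python
# Illustrative fault lookup mirroring the "not matched" fallback.
class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

_FAULT_CLASSES: dict[str, type] = {}  # specific fault name -> exception class

def translate_fault(fault_name: str, message: str) -> Exception:
    cls = _FAULT_CLASSES.get(fault_name)
    if cls is None:  # "Fault InvalidArgument not matched."
        return VimFaultException([fault_name], message)
    return cls(message)

err = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
print(err, err.fault_list)  # -> ... ['InvalidArgument']
```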
[ 2841.935401] env[61868]: INFO nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Terminating instance
[ 2841.936184] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Start destroying the instance on the hypervisor. {{(pid=61868) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2841.936379] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Destroying instance {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2841.937133] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6944159d-77bd-4ad5-8c11-e9c579a0d13b {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2841.944125] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Unregistering the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2841.944398] env[61868]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4f2832d-a223-411b-98d0-d9e1290caa6f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2842.006241] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Unregistered the VM {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2842.006517] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Deleting contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2842.006600] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Deleting the datastore file [datastore2] 1c53c446-6686-4c95-b18e-5cafd08883c7 {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2842.006869] env[61868]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fd6f9f7-a8f4-4d20-92f8-8aa1c6dc429e {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2842.015459] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Waiting for the task: (returnval){
[ 2842.015459] env[61868]: value = "task-41262"
[ 2842.015459] env[61868]: _type = "Task"
[ 2842.015459] env[61868]: } to complete. {{(pid=61868) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2842.023170] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Task: {'id': task-41262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2842.351276] env[61868]: DEBUG oslo_service.periodic_task [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61868) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2842.351767] env[61868]: DEBUG nova.compute.manager [None req-12a1a4ef-715d-4eb4-915a-97aa87f958d9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61868) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10551}}
[ 2842.525205] env[61868]: DEBUG oslo_vmware.api [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Task: {'id': task-41262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064036} completed successfully. {{(pid=61868) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2842.525563] env[61868]: DEBUG nova.virt.vmwareapi.ds_util [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Deleted the datastore file {{(pid=61868) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2842.525746] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Deleted contents of the VM from datastore datastore2 {{(pid=61868) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2842.525918] env[61868]: DEBUG nova.virt.vmwareapi.vmops [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Instance destroyed {{(pid=61868) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2842.526087] env[61868]: INFO nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Took 0.59 seconds to destroy the instance on the hypervisor.
[ 2842.528161] env[61868]: DEBUG nova.compute.claims [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Aborting claim: {{(pid=61868) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 2842.528335] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2842.528549] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2842.594810] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39129fdb-78e8-4bc3-8a15-99d1c301153f {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2842.602603] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f49176-68ed-49f4-94b7-ecf9712829e2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2842.632329] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4e9cbe-74ca-42b1-b2aa-9d38759c35a2 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2842.639957] env[61868]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38201639-8acd-4949-9306-6d0f31f13390 {{(pid=61868) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2842.652935] env[61868]: DEBUG nova.compute.provider_tree [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Inventory has not changed in ProviderTree for provider: 6539a0d3-09f9-481f-a837-7ea10081c3cc {{(pid=61868) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2842.661626] env[61868]: DEBUG nova.scheduler.client.report [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Inventory has not changed for provider 6539a0d3-09f9-481f-a837-7ea10081c3cc based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61868) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2842.676667] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.148s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
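The acquired/waited/held bookkeeping around "compute_resources" (here for abort_instance_claim, earlier for _update_available_resource) comes from serializing all resource-tracker mutations on one named lock and timing both the wait and the hold. A minimal sketch of that pattern; this is a stand-in for illustration, not oslo.concurrency itself.

```python
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}  # one process-wide lock per name

@contextmanager
def timed_lock(name: str):
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    with lock:
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - start:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            print(f'Lock "{name}" "released" :: held {time.monotonic() - held_from:.3f}s')

with timed_lock("compute_resources"):
    pass  # e.g. the abort_instance_claim() body
```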
[ 2842.677197] env[61868]: ERROR nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2842.677197] env[61868]: Faults: ['InvalidArgument']
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Traceback (most recent call last):
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self.driver.spawn(context, instance, image_meta,
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self._fetch_image_if_missing(context, vi)
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] image_cache(vi, tmp_image_ds_loc)
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] vm_util.copy_virtual_disk(
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] session._wait_for_task(vmdk_copy_task)
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] return self.wait_for_task(task_ref)
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] return evt.wait()
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] result = hub.switch()
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] return self.greenlet.switch()
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] self.f(*self.args, **self.kw)
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] raise exceptions.translate_fault(task_info.error)
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Faults: ['InvalidArgument']
[ 2842.677197] env[61868]: ERROR nova.compute.manager [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7]
[ 2842.678212] env[61868]: DEBUG nova.compute.utils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] VimFaultException {{(pid=61868) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2842.679440] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Build of instance 1c53c446-6686-4c95-b18e-5cafd08883c7 was re-scheduled: A specified parameter was not correct: fileType
[ 2842.679440] env[61868]: Faults: ['InvalidArgument'] {{(pid=61868) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2842.679840] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Unplugging VIFs for instance {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2842.680030] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61868) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
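The "does not provide unplug_vifs" line reflects an optional-capability probe: unimplemented virt-driver hooks conventionally raise NotImplementedError, and the manager treats that as "cannot determine" and skips the step. A hedged sketch of that convention; the class and function names here are illustrative, and Nova's exact check may differ.

```python
# Optional-capability probe via the NotImplementedError convention.
class BaseDriver:
    def unplug_vifs(self, instance, network_info):
        raise NotImplementedError()  # driver opted out of this hook

def cleanup_vifs(driver, instance, network_info):
    try:
        driver.unplug_vifs(instance, network_info)
    except NotImplementedError:
        print("Virt driver does not provide unplug_vifs method, so it is "
              "not possible determine if VIFs should be unplugged.")

cleanup_vifs(BaseDriver(), object(), [])
```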
[ 2842.680207] env[61868]: DEBUG nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Deallocating network for instance {{(pid=61868) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2842.680400] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] deallocate_for_instance() {{(pid=61868) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2842.947595] env[61868]: DEBUG nova.network.neutron [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Updating instance_info_cache with network_info: [] {{(pid=61868) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2842.960629] env[61868]: INFO nova.compute.manager [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] [instance: 1c53c446-6686-4c95-b18e-5cafd08883c7] Took 0.28 seconds to deallocate network for instance.
[ 2843.062144] env[61868]: INFO nova.scheduler.client.report [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Deleted allocations for instance 1c53c446-6686-4c95-b18e-5cafd08883c7
[ 2843.083673] env[61868]: DEBUG oslo_concurrency.lockutils [None req-33127176-2d89-4c82-92b5-5495a348db5c tempest-ServersNegativeTestMultiTenantJSON-246455800 tempest-ServersNegativeTestMultiTenantJSON-246455800-project-member] Lock "1c53c446-6686-4c95-b18e-5cafd08883c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.555s {{(pid=61868) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}